ArmNN 21.08
NetworkTests.cpp
//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "GraphUtils.hpp"

#include <armnn/LayerVisitorBase.hpp>

#include <Network.hpp>

#include <doctest/doctest.h>

namespace
{

bool AreAllLayerInputSlotsConnected(const armnn::IConnectableLayer& layer)
{
    bool allConnected = true;
    for (unsigned int i = 0; i < layer.GetNumInputSlots(); ++i)
    {
        const bool inputConnected = layer.GetInputSlot(i).GetConnection() != nullptr;
        allConnected &= inputConnected;
    }
    return allConnected;
}

}

TEST_SUITE("Network")
{
TEST_CASE("LayerGuids")
{
    armnn::NetworkImpl net;
    armnn::LayerGuid inputId = net.AddInputLayer(0)->GetGuid();
    armnn::LayerGuid addId = net.AddAdditionLayer()->GetGuid();
    armnn::LayerGuid outputId = net.AddOutputLayer(0)->GetGuid();

    CHECK(inputId != addId);
    CHECK(addId != outputId);
    CHECK(inputId != outputId);
}

TEST_CASE("NetworkBasic")
{
    armnn::NetworkImpl net;
    CHECK(net.PrintGraph() == armnn::Status::Success);
}

TEST_CASE("LayerNamesAreOptionalForINetwork")
{
    armnn::INetworkPtr inet(armnn::INetwork::Create());
    inet->AddInputLayer(0);
    inet->AddAdditionLayer();
    inet->AddActivationLayer(armnn::ActivationDescriptor());
    inet->AddOutputLayer(0);
}

TEST_CASE("LayerNamesAreOptionalForNetwork")
{
    armnn::NetworkImpl net;
    net.AddInputLayer(0);
    net.AddAdditionLayer();
    net.AddActivationLayer(armnn::ActivationDescriptor());
    net.AddOutputLayer(0);
}

TEST_CASE("NetworkModification")
{
    armnn::NetworkImpl net;

    armnn::IConnectableLayer* const inputLayer = net.AddInputLayer(0, "input layer");
    CHECK(inputLayer);

    unsigned int dims[] = { 10,1,1,1 };
    std::vector<float> convWeightsData(10);
    armnn::ConstTensor weights(armnn::TensorInfo(4, dims, armnn::DataType::Float32), convWeightsData);

    armnn::Convolution2dDescriptor convDesc2d;
    armnn::IConnectableLayer* const convLayer = net.AddConvolution2dLayer(convDesc2d,
                                                                          weights,
                                                                          armnn::EmptyOptional(),
                                                                          "conv layer");
    CHECK(convLayer);

    inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));

    armnn::FullyConnectedDescriptor fullyConnectedDesc;

    // Constant layer that now holds the weights data for FullyConnected.
    armnn::IConnectableLayer* const constantWeightsLayer = net.AddConstantLayer(weights, "const weights");
    armnn::IConnectableLayer* const fullyConnectedLayer = net.AddFullyConnectedLayer(fullyConnectedDesc,
                                                                                     "fully connected");
    CHECK(constantWeightsLayer);
    CHECK(fullyConnectedLayer);

    constantWeightsLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(1));
    convLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(0));

    armnn::Pooling2dDescriptor pooling2dDesc;
    armnn::IConnectableLayer* const poolingLayer = net.AddPooling2dLayer(pooling2dDesc, "pooling2d");
    CHECK(poolingLayer);

    fullyConnectedLayer->GetOutputSlot(0).Connect(poolingLayer->GetInputSlot(0));

    armnn::ActivationDescriptor activationDesc;
    armnn::IConnectableLayer* const activationLayer = net.AddActivationLayer(activationDesc, "activation");
    CHECK(activationLayer);

    poolingLayer->GetOutputSlot(0).Connect(activationLayer->GetInputSlot(0));

    armnn::NormalizationDescriptor normalizationDesc;
    armnn::IConnectableLayer* const normalizationLayer = net.AddNormalizationLayer(normalizationDesc, "normalization");
    CHECK(normalizationLayer);

    activationLayer->GetOutputSlot(0).Connect(normalizationLayer->GetInputSlot(0));

    armnn::SoftmaxDescriptor softmaxDesc;
    armnn::IConnectableLayer* const softmaxLayer = net.AddSoftmaxLayer(softmaxDesc, "softmax");
    CHECK(softmaxLayer);

    normalizationLayer->GetOutputSlot(0).Connect(softmaxLayer->GetInputSlot(0));

    armnn::BatchNormalizationDescriptor batchNormDesc;

    armnn::TensorInfo tensorInfo({ 1 }, armnn::DataType::Float32);
    std::vector<float> data(tensorInfo.GetNumBytes() / sizeof(float));
    armnn::ConstTensor invalidTensor(tensorInfo, data);

    armnn::IConnectableLayer* const batchNormalizationLayer = net.AddBatchNormalizationLayer(batchNormDesc,
                                                                                             invalidTensor,
                                                                                             invalidTensor,
                                                                                             invalidTensor,
                                                                                             invalidTensor,
                                                                                             "batch norm");
    CHECK(batchNormalizationLayer);

    softmaxLayer->GetOutputSlot(0).Connect(batchNormalizationLayer->GetInputSlot(0));

    armnn::IConnectableLayer* const additionLayer = net.AddAdditionLayer("addition");
    CHECK(additionLayer);

    batchNormalizationLayer->GetOutputSlot(0).Connect(additionLayer->GetInputSlot(0));
    batchNormalizationLayer->GetOutputSlot(0).Connect(additionLayer->GetInputSlot(1));

    armnn::IConnectableLayer* const multiplicationLayer = net.AddMultiplicationLayer("multiplication");
    CHECK(multiplicationLayer);

    additionLayer->GetOutputSlot(0).Connect(multiplicationLayer->GetInputSlot(0));
    additionLayer->GetOutputSlot(0).Connect(multiplicationLayer->GetInputSlot(1));

    armnn::IConnectableLayer* const outputLayer = net.AddOutputLayer(0, "output layer");
    CHECK(outputLayer);

    multiplicationLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Tests that all layers are present in the graph.
    CHECK(net.GetGraph().GetNumLayers() == 12);

    // Tests that the vertices exist and have correct names.
    CHECK(GraphHasNamedLayer(net.GetGraph(), "input layer"));
    CHECK(GraphHasNamedLayer(net.GetGraph(), "conv layer"));
    CHECK(GraphHasNamedLayer(net.GetGraph(), "const weights"));
    CHECK(GraphHasNamedLayer(net.GetGraph(), "fully connected"));
    CHECK(GraphHasNamedLayer(net.GetGraph(), "pooling2d"));
    CHECK(GraphHasNamedLayer(net.GetGraph(), "activation"));
    CHECK(GraphHasNamedLayer(net.GetGraph(), "normalization"));
    CHECK(GraphHasNamedLayer(net.GetGraph(), "softmax"));
    CHECK(GraphHasNamedLayer(net.GetGraph(), "batch norm"));
    CHECK(GraphHasNamedLayer(net.GetGraph(), "addition"));
    CHECK(GraphHasNamedLayer(net.GetGraph(), "multiplication"));
    CHECK(GraphHasNamedLayer(net.GetGraph(), "output layer"));

    auto checkOneOutputToOneInputConnection = []
        (const armnn::IConnectableLayer* const srcLayer,
         const armnn::IConnectableLayer* const tgtLayer,
         int expectedSrcNumInputs = 1,
         int expectedDstNumOutputs = 1)
    {
        CHECK(srcLayer->GetNumInputSlots() == expectedSrcNumInputs);
        CHECK(srcLayer->GetNumOutputSlots() == 1);
        CHECK(tgtLayer->GetNumInputSlots() == 1);
        CHECK(tgtLayer->GetNumOutputSlots() == expectedDstNumOutputs);

        CHECK(srcLayer->GetOutputSlot(0).GetNumConnections() == 1);
        CHECK(srcLayer->GetOutputSlot(0).GetConnection(0) == &tgtLayer->GetInputSlot(0));
        CHECK(&srcLayer->GetOutputSlot(0) == tgtLayer->GetInputSlot(0).GetConnection());
    };
    auto checkOneOutputToTwoInputsConnections = []
        (const armnn::IConnectableLayer* const srcLayer,
         const armnn::IConnectableLayer* const tgtLayer,
         int expectedSrcNumInputs,
         int expectedDstNumOutputs = 1)
    {
        CHECK(srcLayer->GetNumInputSlots() == expectedSrcNumInputs);
        CHECK(srcLayer->GetNumOutputSlots() == 1);
        CHECK(tgtLayer->GetNumInputSlots() == 2);
        CHECK(tgtLayer->GetNumOutputSlots() == expectedDstNumOutputs);

        CHECK(srcLayer->GetOutputSlot(0).GetNumConnections() == 2);
        for (unsigned int i = 0; i < srcLayer->GetOutputSlot(0).GetNumConnections(); ++i)
        {
            CHECK(srcLayer->GetOutputSlot(0).GetConnection(i) == &tgtLayer->GetInputSlot(i));
            CHECK(&srcLayer->GetOutputSlot(0) == tgtLayer->GetInputSlot(i).GetConnection());
        }
    };
    auto checkOneOutputToTwoInputConnectionForTwoDifferentLayers = []
        (const armnn::IConnectableLayer* const srcLayer1,
         const armnn::IConnectableLayer* const srcLayer2,
         const armnn::IConnectableLayer* const tgtLayer,
         int expectedSrcNumInputs1 = 1,
         int expectedSrcNumInputs2 = 1,
         int expectedDstNumOutputs = 1)
    {
        CHECK(srcLayer1->GetNumInputSlots() == expectedSrcNumInputs1);
        CHECK(srcLayer1->GetNumOutputSlots() == 1);
        CHECK(srcLayer2->GetNumInputSlots() == expectedSrcNumInputs2);
        CHECK(srcLayer2->GetNumOutputSlots() == 1);
        CHECK(tgtLayer->GetNumInputSlots() == 2);
        CHECK(tgtLayer->GetNumOutputSlots() == expectedDstNumOutputs);

        CHECK(srcLayer1->GetOutputSlot(0).GetNumConnections() == 1);
        CHECK(srcLayer2->GetOutputSlot(0).GetNumConnections() == 1);
        CHECK(srcLayer1->GetOutputSlot(0).GetConnection(0) == &tgtLayer->GetInputSlot(0));
        CHECK(srcLayer2->GetOutputSlot(0).GetConnection(0) == &tgtLayer->GetInputSlot(1));
        CHECK(&srcLayer1->GetOutputSlot(0) == tgtLayer->GetInputSlot(0).GetConnection());
        CHECK(&srcLayer2->GetOutputSlot(0) == tgtLayer->GetInputSlot(1).GetConnection());
    };

    CHECK(AreAllLayerInputSlotsConnected(*convLayer));
    CHECK(AreAllLayerInputSlotsConnected(*fullyConnectedLayer));
    CHECK(AreAllLayerInputSlotsConnected(*poolingLayer));
    CHECK(AreAllLayerInputSlotsConnected(*activationLayer));
    CHECK(AreAllLayerInputSlotsConnected(*normalizationLayer));
    CHECK(AreAllLayerInputSlotsConnected(*softmaxLayer));
    CHECK(AreAllLayerInputSlotsConnected(*batchNormalizationLayer));
    CHECK(AreAllLayerInputSlotsConnected(*additionLayer));
    CHECK(AreAllLayerInputSlotsConnected(*multiplicationLayer));
    CHECK(AreAllLayerInputSlotsConnected(*outputLayer));

    // Checks connectivity.
    checkOneOutputToOneInputConnection(inputLayer, convLayer, 0);
    checkOneOutputToTwoInputConnectionForTwoDifferentLayers(convLayer, constantWeightsLayer, fullyConnectedLayer, 1, 0);
    checkOneOutputToOneInputConnection(fullyConnectedLayer, poolingLayer, 2, 1);
    checkOneOutputToOneInputConnection(poolingLayer, activationLayer);
    checkOneOutputToOneInputConnection(activationLayer, normalizationLayer);
    checkOneOutputToOneInputConnection(normalizationLayer, softmaxLayer);
    checkOneOutputToOneInputConnection(softmaxLayer, batchNormalizationLayer);
    checkOneOutputToTwoInputsConnections(batchNormalizationLayer, additionLayer, 1);
    checkOneOutputToTwoInputsConnections(additionLayer, multiplicationLayer, 2);
    checkOneOutputToOneInputConnection(multiplicationLayer, outputLayer, 2, 0);
}

TEST_CASE("NetworkModification_SplitterConcat")
{
    armnn::NetworkImpl net;

    // Adds an input layer and an input tensor descriptor.
    armnn::IConnectableLayer* inputLayer = net.AddInputLayer(0, "input layer");
    CHECK(inputLayer);

    // Adds a splitter layer.
    armnn::ViewsDescriptor splitterDesc(2,4);

    armnn::IConnectableLayer* splitterLayer = net.AddSplitterLayer(splitterDesc, "splitter layer");
    CHECK(splitterLayer);

    inputLayer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0));

    // Adds softmax layer 1.
    armnn::SoftmaxDescriptor softmaxDescriptor;
    armnn::IConnectableLayer* softmaxLayer1 = net.AddSoftmaxLayer(softmaxDescriptor, "softmax_1");
    CHECK(softmaxLayer1);

    splitterLayer->GetOutputSlot(0).Connect(softmaxLayer1->GetInputSlot(0));

    // Adds softmax layer 2.
    armnn::IConnectableLayer* softmaxLayer2 = net.AddSoftmaxLayer(softmaxDescriptor, "softmax_2");
    CHECK(softmaxLayer2);

    splitterLayer->GetOutputSlot(1).Connect(softmaxLayer2->GetInputSlot(0));

    // Adds a concat layer.
    armnn::OriginsDescriptor concatDesc(2, 4);

    armnn::IConnectableLayer* concatLayer = net.AddConcatLayer(concatDesc, "concat layer");
    CHECK(concatLayer);

    softmaxLayer1->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(0));
    softmaxLayer2->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(1));

    // Adds an output layer.
    armnn::IConnectableLayer* outputLayer = net.AddOutputLayer(0, "output layer");
    CHECK(outputLayer);

    concatLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    CHECK(splitterLayer->GetNumOutputSlots() == 2);
    CHECK(splitterLayer->GetOutputSlot(0).GetConnection(0) == &softmaxLayer1->GetInputSlot(0));
    CHECK(&splitterLayer->GetOutputSlot(0) == softmaxLayer1->GetInputSlot(0).GetConnection());
    CHECK(splitterLayer->GetOutputSlot(1).GetConnection(0) == &softmaxLayer2->GetInputSlot(0));
    CHECK(&splitterLayer->GetOutputSlot(1) == softmaxLayer2->GetInputSlot(0).GetConnection());

    CHECK(concatLayer->GetNumInputSlots() == 2);
    CHECK(softmaxLayer1->GetOutputSlot(0).GetConnection(0) == &concatLayer->GetInputSlot(0));
    CHECK(&softmaxLayer1->GetOutputSlot(0) == concatLayer->GetInputSlot(0).GetConnection());
    CHECK(softmaxLayer2->GetOutputSlot(0).GetConnection(0) == &concatLayer->GetInputSlot(1));
    CHECK(&softmaxLayer2->GetOutputSlot(0) == concatLayer->GetInputSlot(1).GetConnection());
}

TEST_CASE("NetworkModification_SplitterAddition")
{
    armnn::NetworkImpl net;

    // Adds an input layer and an input tensor descriptor.
    armnn::IConnectableLayer* layer = net.AddInputLayer(0, "input layer");
    CHECK(layer);

    // Adds a splitter layer.
    armnn::ViewsDescriptor splitterDesc(2,4);

    armnn::IConnectableLayer* const splitterLayer = net.AddSplitterLayer(splitterDesc, "splitter layer");
    CHECK(splitterLayer);

    layer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0));

    // Adds softmax layer 1.
    armnn::SoftmaxDescriptor softmaxDescriptor;
    armnn::IConnectableLayer* const softmax1Layer = net.AddSoftmaxLayer(softmaxDescriptor, "softmax_1");
    CHECK(softmax1Layer);

    splitterLayer->GetOutputSlot(0).Connect(softmax1Layer->GetInputSlot(0));

    // Adds softmax layer 2.
    armnn::IConnectableLayer* const softmax2Layer = net.AddSoftmaxLayer(softmaxDescriptor, "softmax_2");
    CHECK(softmax2Layer);

    splitterLayer->GetOutputSlot(1).Connect(softmax2Layer->GetInputSlot(0));

    // Adds an addition layer.
    layer = net.AddAdditionLayer("add layer");
    CHECK(layer);

    softmax1Layer->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
    softmax2Layer->GetOutputSlot(0).Connect(layer->GetInputSlot(1));

    // Adds an output layer.
    armnn::IConnectableLayer* prevLayer = layer;
    layer = net.AddOutputLayer(0, "output layer");
    CHECK(layer);

    prevLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
}

TEST_CASE("NetworkModification_SplitterMultiplication")
{
    armnn::NetworkImpl net;

    // Adds an input layer and an input tensor descriptor.
    armnn::IConnectableLayer* layer = net.AddInputLayer(0, "input layer");
    CHECK(layer);

    // Adds a splitter layer.
    armnn::ViewsDescriptor splitterDesc(2,4);
    armnn::IConnectableLayer* const splitterLayer = net.AddSplitterLayer(splitterDesc, "splitter layer");
    CHECK(splitterLayer);

    layer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0));

    // Adds softmax layer 1.
    armnn::SoftmaxDescriptor softmaxDescriptor;
    armnn::IConnectableLayer* const softmax1Layer = net.AddSoftmaxLayer(softmaxDescriptor, "softmax_1");
    CHECK(softmax1Layer);

    splitterLayer->GetOutputSlot(0).Connect(softmax1Layer->GetInputSlot(0));

    // Adds softmax layer 2.
    armnn::IConnectableLayer* const softmax2Layer = net.AddSoftmaxLayer(softmaxDescriptor, "softmax_2");
    CHECK(softmax2Layer);

    splitterLayer->GetOutputSlot(1).Connect(softmax2Layer->GetInputSlot(0));

    // Adds a multiplication layer.
    layer = net.AddMultiplicationLayer("multiplication layer");
    CHECK(layer);

    softmax1Layer->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
    softmax2Layer->GetOutputSlot(0).Connect(layer->GetInputSlot(1));

    // Adds an output layer.
    armnn::IConnectableLayer* prevLayer = layer;
    layer = net.AddOutputLayer(0, "output layer");
    CHECK(layer);

    prevLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
}

TEST_CASE("Network_AddQuantize")
{
    struct Test : public armnn::LayerVisitorBase<armnn::VisitorNoThrowPolicy>
    {
        void VisitQuantizeLayer(const armnn::IConnectableLayer* layer, const char* name) override
        {
            m_Visited = true;

            CHECK(layer);

            std::string expectedName = std::string("quantize");
            CHECK(std::string(layer->GetName()) == expectedName);
            CHECK(std::string(name) == expectedName);

            CHECK(layer->GetNumInputSlots() == 1);
            CHECK(layer->GetNumOutputSlots() == 1);

            const armnn::TensorInfo& infoIn = layer->GetInputSlot(0).GetConnection()->GetTensorInfo();
            CHECK((infoIn.GetDataType() == armnn::DataType::Float32));

            const armnn::TensorInfo& infoOut = layer->GetOutputSlot(0).GetTensorInfo();
            CHECK((infoOut.GetDataType() == armnn::DataType::QAsymmU8));
        }

        bool m_Visited = false;
    };

    auto graph = armnn::INetwork::Create();

    auto input = graph->AddInputLayer(0, "input");
    auto quantize = graph->AddQuantizeLayer("quantize");
    auto output = graph->AddOutputLayer(1, "output");

    input->GetOutputSlot(0).Connect(quantize->GetInputSlot(0));
    quantize->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    // The visitor above only checks the data types; the shapes here are illustrative.
    armnn::TensorInfo infoIn({ 3, 1 }, armnn::DataType::Float32);
    input->GetOutputSlot(0).SetTensorInfo(infoIn);

    armnn::TensorInfo infoOut({ 3, 1 }, armnn::DataType::QAsymmU8);
    quantize->GetOutputSlot(0).SetTensorInfo(infoOut);

    Test testQuantize;
    graph->Accept(testQuantize);

    CHECK(testQuantize.m_Visited == true);
}

TEST_CASE("Network_AddMerge")
{
    struct Test : public armnn::LayerVisitorBase<armnn::VisitorNoThrowPolicy>
    {
        void VisitMergeLayer(const armnn::IConnectableLayer* layer, const char* name) override
        {
            m_Visited = true;

            CHECK(layer);

            std::string expectedName = std::string("merge");
            CHECK(std::string(layer->GetName()) == expectedName);
            CHECK(std::string(name) == expectedName);

            CHECK(layer->GetNumInputSlots() == 2);
            CHECK(layer->GetNumOutputSlots() == 1);

            const armnn::TensorInfo& infoIn0 = layer->GetInputSlot(0).GetConnection()->GetTensorInfo();
            CHECK((infoIn0.GetDataType() == armnn::DataType::Float32));

            const armnn::TensorInfo& infoIn1 = layer->GetInputSlot(1).GetConnection()->GetTensorInfo();
            CHECK((infoIn1.GetDataType() == armnn::DataType::Float32));

            const armnn::TensorInfo& infoOut = layer->GetOutputSlot(0).GetTensorInfo();
            CHECK((infoOut.GetDataType() == armnn::DataType::Float32));
        }

        bool m_Visited = false;
    };

    armnn::INetworkPtr network = armnn::INetwork::Create();

    armnn::IConnectableLayer* input0 = network->AddInputLayer(0);
    armnn::IConnectableLayer* input1 = network->AddInputLayer(1);
    armnn::IConnectableLayer* merge = network->AddMergeLayer("merge");
    armnn::IConnectableLayer* output = network->AddOutputLayer(0);

    input0->GetOutputSlot(0).Connect(merge->GetInputSlot(0));
    input1->GetOutputSlot(0).Connect(merge->GetInputSlot(1));
    merge->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    // The visitor above only checks the data type; the shape here is illustrative.
    armnn::TensorInfo info({ 3, 1 }, armnn::DataType::Float32);
    input0->GetOutputSlot(0).SetTensorInfo(info);
    input1->GetOutputSlot(0).SetTensorInfo(info);
    merge->GetOutputSlot(0).SetTensorInfo(info);

    Test testMerge;
    network->Accept(testMerge);

    CHECK(testMerge.m_Visited == true);
}

TEST_CASE("StandInLayerNetworkTest")
{
    // Create a simple network with a StandIn layer somewhere in it.
    armnn::NetworkImpl net;
    auto input = net.AddInputLayer(0);

    // Add some valid layer.
    auto floor = net.AddFloorLayer("Floor");

    // Add a StandIn layer.
    armnn::StandInDescriptor standInDescriptor;
    standInDescriptor.m_NumInputs = 1;
    standInDescriptor.m_NumOutputs = 1;
    auto standIn = net.AddStandInLayer(standInDescriptor, "StandIn");

    // Finally the output.
    auto output = net.AddOutputLayer(0);

    // Connect up the layers.
    input->GetOutputSlot(0).Connect(floor->GetInputSlot(0));

    floor->GetOutputSlot(0).Connect(standIn->GetInputSlot(0));

    standIn->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    // Check that the layer is there.
    CHECK(GraphHasNamedLayer(net.GetGraph(), "StandIn"));
    // Check that it is connected as expected.
    CHECK(input->GetOutputSlot(0).GetConnection(0) == &floor->GetInputSlot(0));
    CHECK(floor->GetOutputSlot(0).GetConnection(0) == &standIn->GetInputSlot(0));
    CHECK(standIn->GetOutputSlot(0).GetConnection(0) == &output->GetInputSlot(0));
}

TEST_CASE("StandInLayerSingleInputMultipleOutputsNetworkTest")
{
    // Another test with one input and two outputs on the StandIn layer.
    armnn::NetworkImpl net;

    // Create the input.
    auto input = net.AddInputLayer(0);

    // Add a StandIn layer.
    armnn::StandInDescriptor standInDescriptor;
    standInDescriptor.m_NumInputs = 1;
    standInDescriptor.m_NumOutputs = 2;
    auto standIn = net.AddStandInLayer(standInDescriptor, "StandIn");

    // Add two outputs.
    auto output0 = net.AddOutputLayer(0);
    auto output1 = net.AddOutputLayer(1);

    // Connect up the layers.
    input->GetOutputSlot(0).Connect(standIn->GetInputSlot(0));

    // Connect the two outputs of the StandIn to the two output layers.
    standIn->GetOutputSlot(0).Connect(output0->GetInputSlot(0));
    standIn->GetOutputSlot(1).Connect(output1->GetInputSlot(0));

    // Check that the layer is there.
    CHECK(GraphHasNamedLayer(net.GetGraph(), "StandIn"));
    // Check that it is connected as expected.
    CHECK(input->GetOutputSlot(0).GetConnection(0) == &standIn->GetInputSlot(0));
    CHECK(standIn->GetOutputSlot(0).GetConnection(0) == &output0->GetInputSlot(0));
    CHECK(standIn->GetOutputSlot(1).GetConnection(0) == &output1->GetInputSlot(0));
}

}