ArmNN
 22.05
NetworkTests.cpp
Go to the documentation of this file.
1 //
2 // Copyright © 2017 Arm Ltd. All rights reserved.
3 // SPDX-License-Identifier: MIT
4 //
5 
#include <GraphUtils.hpp>

#include <armnn/utility/IgnoreUnused.hpp>

#include <Network.hpp>

#include <doctest/doctest.h>
14 namespace
15 {
16 
17 bool AreAllLayerInputSlotsConnected(const armnn::IConnectableLayer& layer)
18 {
19  bool allConnected = true;
20  for (unsigned int i = 0; i < layer.GetNumInputSlots(); ++i)
21  {
22  const bool inputConnected = layer.GetInputSlot(i).GetConnection() != nullptr;
23  allConnected &= inputConnected;
24  }
25  return allConnected;
26 }
27 
28 }
29 
30 TEST_SUITE("Network")
31 {
32 TEST_CASE("LayerGuids")
33 {
35  LayerGuid inputId = net.AddInputLayer(0)->GetGuid();
36  LayerGuid addId = net.AddAdditionLayer()->GetGuid();
37  LayerGuid outputId = net.AddOutputLayer(0)->GetGuid();
38 
39  CHECK(inputId != addId);
40  CHECK(addId != outputId);
41  CHECK(inputId != outputId);
42 }
43 
44 TEST_CASE("NetworkBasic")
45 {
47  CHECK(net.PrintGraph() == armnn::Status::Success);
48 }
49 
50 TEST_CASE("LayerNamesAreOptionalForINetwork")
51 {
53  inet->AddInputLayer(0);
54  inet->AddAdditionLayer();
55  inet->AddActivationLayer(armnn::ActivationDescriptor());
56  inet->AddOutputLayer(0);
57 }
58 
59 TEST_CASE("LayerNamesAreOptionalForNetwork")
60 {
62  net.AddInputLayer(0);
63  net.AddAdditionLayer();
65  net.AddOutputLayer(0);
66 }
67 
68 TEST_CASE("NetworkModification")
69 {
71 
72  armnn::IConnectableLayer* const inputLayer = net.AddInputLayer(0, "input layer");
73  CHECK(inputLayer);
74 
75  unsigned int dims[] = { 10,1,1,1 };
76  std::vector<float> convWeightsData(10);
77  armnn::ConstTensor weights(armnn::TensorInfo(4, dims, armnn::DataType::Float32, 0.0f, 0, true), convWeightsData);
78 
80  armnn::IConnectableLayer* const weightsLayer = net.AddConstantLayer(weights, "conv const weights");
81  armnn::IConnectableLayer* const convLayer = net.AddConvolution2dLayer(convDesc2d, "conv layer");
82  CHECK(convLayer);
83  CHECK(weightsLayer);
84 
85  inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
86  weightsLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(1));
87 
88  armnn::FullyConnectedDescriptor fullyConnectedDesc;
89 
90  // Constant layer that now holds weights data for FullyConnected
91  armnn::IConnectableLayer* const constantWeightsLayer = net.AddConstantLayer(weights, "fc const weights");
92  armnn::IConnectableLayer* const fullyConnectedLayer = net.AddFullyConnectedLayer(fullyConnectedDesc,
93  "fully connected");
94  CHECK(constantWeightsLayer);
95  CHECK(fullyConnectedLayer);
96 
97  constantWeightsLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(1));
98  convLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(0));
99 
100  armnn::Pooling2dDescriptor pooling2dDesc;
101  armnn::IConnectableLayer* const poolingLayer = net.AddPooling2dLayer(pooling2dDesc, "pooling2d");
102  CHECK(poolingLayer);
103 
104  fullyConnectedLayer->GetOutputSlot(0).Connect(poolingLayer->GetInputSlot(0));
105 
106  armnn::ActivationDescriptor activationDesc;
107  armnn::IConnectableLayer* const activationLayer = net.AddActivationLayer(activationDesc, "activation");
108  CHECK(activationLayer);
109 
110  poolingLayer->GetOutputSlot(0).Connect(activationLayer->GetInputSlot(0));
111 
112  armnn::NormalizationDescriptor normalizationDesc;
113  armnn::IConnectableLayer* const normalizationLayer = net.AddNormalizationLayer(normalizationDesc, "normalization");
114  CHECK(normalizationLayer);
115 
116  activationLayer->GetOutputSlot(0).Connect(normalizationLayer->GetInputSlot(0));
117 
118  armnn::SoftmaxDescriptor softmaxDesc;
119  armnn::IConnectableLayer* const softmaxLayer = net.AddSoftmaxLayer(softmaxDesc, "softmax");
120  CHECK(softmaxLayer);
121 
122  normalizationLayer->GetOutputSlot(0).Connect(softmaxLayer->GetInputSlot(0));
123 
125 
126  armnn::TensorInfo tensorInfo({ 1 }, armnn::DataType::Float32, 0.0f, 0, true);
127  std::vector<float> data(tensorInfo.GetNumBytes() / sizeof(float));
128  armnn::ConstTensor invalidTensor(tensorInfo, data);
129 
130  armnn::IConnectableLayer* const batchNormalizationLayer = net.AddBatchNormalizationLayer(batchNormDesc,
131  invalidTensor,
132  invalidTensor,
133  invalidTensor,
134  invalidTensor,
135  "batch norm");
136  CHECK(batchNormalizationLayer);
137 
138  softmaxLayer->GetOutputSlot(0).Connect(batchNormalizationLayer->GetInputSlot(0));
139 
140  armnn::IConnectableLayer* const additionLayer = net.AddAdditionLayer("addition");
141  CHECK(additionLayer);
142 
143  batchNormalizationLayer->GetOutputSlot(0).Connect(additionLayer->GetInputSlot(0));
144  batchNormalizationLayer->GetOutputSlot(0).Connect(additionLayer->GetInputSlot(1));
145 
146  armnn::IConnectableLayer* const multiplicationLayer = net.AddMultiplicationLayer("multiplication");
147  CHECK(multiplicationLayer);
148 
149  additionLayer->GetOutputSlot(0).Connect(multiplicationLayer->GetInputSlot(0));
150  additionLayer->GetOutputSlot(0).Connect(multiplicationLayer->GetInputSlot(1));
151 
152  armnn::IConnectableLayer* const outputLayer = net.AddOutputLayer(0, "output layer");
153  CHECK(outputLayer);
154 
155  multiplicationLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
156 
157  //Tests that all layers are present in the graph.
158  CHECK(net.GetGraph().GetNumLayers() == 13);
159 
160  //Tests that the vertices exist and have correct names.
161  CHECK(GraphHasNamedLayer(net.GetGraph(), "input layer"));
162  CHECK(GraphHasNamedLayer(net.GetGraph(), "conv layer"));
163  CHECK(GraphHasNamedLayer(net.GetGraph(), "conv const weights"));
164  CHECK(GraphHasNamedLayer(net.GetGraph(), "fc const weights"));
165  CHECK(GraphHasNamedLayer(net.GetGraph(), "fully connected"));
166  CHECK(GraphHasNamedLayer(net.GetGraph(), "pooling2d"));
167  CHECK(GraphHasNamedLayer(net.GetGraph(), "activation"));
168  CHECK(GraphHasNamedLayer(net.GetGraph(), "normalization"));
169  CHECK(GraphHasNamedLayer(net.GetGraph(), "softmax"));
170  CHECK(GraphHasNamedLayer(net.GetGraph(), "batch norm"));
171  CHECK(GraphHasNamedLayer(net.GetGraph(), "addition"));
172  CHECK(GraphHasNamedLayer(net.GetGraph(), "multiplication"));
173  CHECK(GraphHasNamedLayer(net.GetGraph(), "output layer"));
174 
175  auto checkOneOutputToOneInputConnection = []
176  (const armnn::IConnectableLayer* const srcLayer,
177  const armnn::IConnectableLayer* const tgtLayer,
178  int expectedSrcNumInputs = 1,
179  int expectedDstNumOutputs = 1)
180  {
181  CHECK(srcLayer->GetNumInputSlots() == expectedSrcNumInputs);
182  CHECK(srcLayer->GetNumOutputSlots() == 1);
183  CHECK(tgtLayer->GetNumInputSlots() == 1);
184  CHECK(tgtLayer->GetNumOutputSlots() == expectedDstNumOutputs);
185 
186  CHECK(srcLayer->GetOutputSlot(0).GetNumConnections() == 1);
187  CHECK(srcLayer->GetOutputSlot(0).GetConnection(0) == &tgtLayer->GetInputSlot(0));
188  CHECK(&srcLayer->GetOutputSlot(0) == tgtLayer->GetInputSlot(0).GetConnection());
189  };
190  auto checkOneOutputToTwoInputsConnections = []
191  (const armnn::IConnectableLayer* const srcLayer,
192  const armnn::IConnectableLayer* const tgtLayer,
193  int expectedSrcNumInputs,
194  int expectedDstNumOutputs = 1)
195  {
196  CHECK(srcLayer->GetNumInputSlots() == expectedSrcNumInputs);
197  CHECK(srcLayer->GetNumOutputSlots() == 1);
198  CHECK(tgtLayer->GetNumInputSlots() == 2);
199  CHECK(tgtLayer->GetNumOutputSlots() == expectedDstNumOutputs);
200 
201  CHECK(srcLayer->GetOutputSlot(0).GetNumConnections() == 2);
202  for (unsigned int i = 0; i < srcLayer->GetOutputSlot(0).GetNumConnections(); ++i)
203  {
204  CHECK(srcLayer->GetOutputSlot(0).GetConnection(i) == &tgtLayer->GetInputSlot(i));
205  CHECK(&srcLayer->GetOutputSlot(0) == tgtLayer->GetInputSlot(i).GetConnection());
206  }
207  };
208  auto checkOneOutputToTwoInputConnectionForTwoDifferentLayers = []
209  (const armnn::IConnectableLayer* const srcLayer1,
210  const armnn::IConnectableLayer* const srcLayer2,
211  const armnn::IConnectableLayer* const tgtLayer,
212  int expectedSrcNumInputs1 = 1,
213  int expectedSrcNumInputs2 = 1,
214  int expectedDstNumOutputs = 1)
215  {
216  CHECK(srcLayer1->GetNumInputSlots() == expectedSrcNumInputs1);
217  CHECK(srcLayer1->GetNumOutputSlots() == 1);
218  CHECK(srcLayer2->GetNumInputSlots() == expectedSrcNumInputs2);
219  CHECK(srcLayer2->GetNumOutputSlots() == 1);
220  CHECK(tgtLayer->GetNumInputSlots() == 2);
221  CHECK(tgtLayer->GetNumOutputSlots() == expectedDstNumOutputs);
222 
223  CHECK(srcLayer1->GetOutputSlot(0).GetNumConnections() == 1);
224  CHECK(srcLayer2->GetOutputSlot(0).GetNumConnections() == 1);
225  CHECK(srcLayer1->GetOutputSlot(0).GetConnection(0) == &tgtLayer->GetInputSlot(0));
226  CHECK(srcLayer2->GetOutputSlot(0).GetConnection(0) == &tgtLayer->GetInputSlot(1));
227  CHECK(&srcLayer1->GetOutputSlot(0) == tgtLayer->GetInputSlot(0).GetConnection());
228  CHECK(&srcLayer2->GetOutputSlot(0) == tgtLayer->GetInputSlot(1).GetConnection());
229  };
230 
231  CHECK(AreAllLayerInputSlotsConnected(*convLayer));
232  CHECK(AreAllLayerInputSlotsConnected(*fullyConnectedLayer));
233  CHECK(AreAllLayerInputSlotsConnected(*poolingLayer));
234  CHECK(AreAllLayerInputSlotsConnected(*activationLayer));
235  CHECK(AreAllLayerInputSlotsConnected(*normalizationLayer));
236  CHECK(AreAllLayerInputSlotsConnected(*softmaxLayer));
237  CHECK(AreAllLayerInputSlotsConnected(*batchNormalizationLayer));
238  CHECK(AreAllLayerInputSlotsConnected(*additionLayer));
239  CHECK(AreAllLayerInputSlotsConnected(*multiplicationLayer));
240  CHECK(AreAllLayerInputSlotsConnected(*outputLayer));
241 
242  // Checks connectivity.
243  checkOneOutputToTwoInputConnectionForTwoDifferentLayers(inputLayer, weightsLayer, convLayer, 0, 0);
244  checkOneOutputToTwoInputConnectionForTwoDifferentLayers(convLayer, constantWeightsLayer, fullyConnectedLayer, 2, 0);
245  checkOneOutputToOneInputConnection(fullyConnectedLayer, poolingLayer, 2, 1);
246  checkOneOutputToOneInputConnection(poolingLayer, activationLayer);
247  checkOneOutputToOneInputConnection(activationLayer, normalizationLayer);
248  checkOneOutputToOneInputConnection(normalizationLayer, softmaxLayer);
249  checkOneOutputToOneInputConnection(softmaxLayer, batchNormalizationLayer);
250  checkOneOutputToTwoInputsConnections(batchNormalizationLayer, additionLayer, 1);
251  checkOneOutputToTwoInputsConnections(additionLayer, multiplicationLayer, 2);
252  checkOneOutputToOneInputConnection(multiplicationLayer, outputLayer, 2, 0);
253 }
254 
255 TEST_CASE("NetworkModification_SplitterConcat")
256 {
257  armnn::NetworkImpl net;
258 
259  // Adds an input layer and an input tensor descriptor.
260  armnn::IConnectableLayer* inputLayer = net.AddInputLayer(0, "input layer");
261  CHECK(inputLayer);
262 
263  // Adds a splitter layer.
264  armnn::ViewsDescriptor splitterDesc(2,4);
265 
266  armnn::IConnectableLayer* splitterLayer = net.AddSplitterLayer(splitterDesc, "splitter layer");
267  CHECK(splitterLayer);
268 
269  inputLayer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0));
270 
271  // Adds a softmax layer 1.
272  armnn::SoftmaxDescriptor softmaxDescriptor;
273  armnn::IConnectableLayer* softmaxLayer1 = net.AddSoftmaxLayer(softmaxDescriptor, "softmax_1");
274  CHECK(softmaxLayer1);
275 
276  splitterLayer->GetOutputSlot(0).Connect(softmaxLayer1->GetInputSlot(0));
277 
278  // Adds a softmax layer 2.
279  armnn::IConnectableLayer* softmaxLayer2 = net.AddSoftmaxLayer(softmaxDescriptor, "softmax_2");
280  CHECK(softmaxLayer2);
281 
282  splitterLayer->GetOutputSlot(1).Connect(softmaxLayer2->GetInputSlot(0));
283 
284  // Adds a concat layer.
285  armnn::OriginsDescriptor concatDesc(2, 4);
286 
287  armnn::IConnectableLayer* concatLayer = net.AddConcatLayer(concatDesc, "concat layer");
288  CHECK(concatLayer);
289 
290  softmaxLayer1->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(0));
291  softmaxLayer2->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(1));
292 
293  // Adds an output layer.
294  armnn::IConnectableLayer* outputLayer = net.AddOutputLayer(0, "output layer");
295  CHECK(outputLayer);
296 
297  concatLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
298 
299  CHECK(splitterLayer->GetNumOutputSlots() == 2);
300  CHECK(splitterLayer->GetOutputSlot(0).GetConnection(0) == &softmaxLayer1->GetInputSlot(0));
301  CHECK(&splitterLayer->GetOutputSlot(0) == softmaxLayer1->GetInputSlot(0).GetConnection());
302  CHECK(splitterLayer->GetOutputSlot(1).GetConnection(0) == &softmaxLayer2->GetInputSlot(0));
303  CHECK(&splitterLayer->GetOutputSlot(1) == softmaxLayer2->GetInputSlot(0).GetConnection());
304 
305  CHECK(concatLayer->GetNumInputSlots() == 2);
306  CHECK(softmaxLayer1->GetOutputSlot(0).GetConnection(0) == &concatLayer->GetInputSlot(0));
307  CHECK(&softmaxLayer1->GetOutputSlot(0) == concatLayer->GetInputSlot(0).GetConnection());
308  CHECK(softmaxLayer2->GetOutputSlot(0).GetConnection(0) == &concatLayer->GetInputSlot(1));
309  CHECK(&softmaxLayer2->GetOutputSlot(0) == concatLayer->GetInputSlot(1).GetConnection());
310 }
311 
312 TEST_CASE("NetworkModification_SplitterAddition")
313 {
314  armnn::NetworkImpl net;
315 
316  // Adds an input layer and an input tensor descriptor.
317  armnn::IConnectableLayer* layer = net.AddInputLayer(0, "input layer");
318  CHECK(layer);
319 
320  // Adds a splitter layer.
321  armnn::ViewsDescriptor splitterDesc(2,4);
322 
323  armnn::IConnectableLayer* const splitterLayer = net.AddSplitterLayer(splitterDesc, "splitter layer");
324  CHECK(splitterLayer);
325 
326  layer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0));
327 
328  // Adds a softmax layer 1.
329  armnn::SoftmaxDescriptor softmaxDescriptor;
330  armnn::IConnectableLayer* const softmax1Layer = net.AddSoftmaxLayer(softmaxDescriptor, "softmax_1");
331  CHECK(softmax1Layer);
332 
333  splitterLayer->GetOutputSlot(0).Connect(softmax1Layer->GetInputSlot(0));
334 
335  // Adds a softmax layer 2.
336  armnn::IConnectableLayer* const softmax2Layer = net.AddSoftmaxLayer(softmaxDescriptor, "softmax_2");
337  CHECK(softmax2Layer);
338 
339  splitterLayer->GetOutputSlot(1).Connect(softmax2Layer->GetInputSlot(0));
340 
341  // Adds addition layer.
342  layer = net.AddAdditionLayer("add layer");
343  CHECK(layer);
344 
345  softmax1Layer->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
346  softmax2Layer->GetOutputSlot(0).Connect(layer->GetInputSlot(1));
347 
348  // Adds an output layer.
349  armnn::IConnectableLayer* prevLayer = layer;
350  layer = net.AddOutputLayer(0, "output layer");
351 
352  prevLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
353 
354  CHECK(layer);
355 }
356 
357 TEST_CASE("NetworkModification_SplitterMultiplication")
358 {
359  armnn::NetworkImpl net;
360 
361  // Adds an input layer and an input tensor descriptor.
362  armnn::IConnectableLayer* layer = net.AddInputLayer(0, "input layer");
363  CHECK(layer);
364 
365  // Adds a splitter layer.
366  armnn::ViewsDescriptor splitterDesc(2,4);
367  armnn::IConnectableLayer* const splitterLayer = net.AddSplitterLayer(splitterDesc, "splitter layer");
368  CHECK(splitterLayer);
369 
370  layer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0));
371 
372  // Adds a softmax layer 1.
373  armnn::SoftmaxDescriptor softmaxDescriptor;
374  armnn::IConnectableLayer* const softmax1Layer = net.AddSoftmaxLayer(softmaxDescriptor, "softmax_1");
375  CHECK(softmax1Layer);
376 
377  splitterLayer->GetOutputSlot(0).Connect(softmax1Layer->GetInputSlot(0));
378 
379  // Adds a softmax layer 2.
380  armnn::IConnectableLayer* const softmax2Layer = net.AddSoftmaxLayer(softmaxDescriptor, "softmax_2");
381  CHECK(softmax2Layer);
382 
383  splitterLayer->GetOutputSlot(1).Connect(softmax2Layer->GetInputSlot(0));
384 
385  // Adds multiplication layer.
386  layer = net.AddMultiplicationLayer("multiplication layer");
387  CHECK(layer);
388 
389  softmax1Layer->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
390  softmax2Layer->GetOutputSlot(0).Connect(layer->GetInputSlot(1));
391 
392  // Adds an output layer.
393  armnn::IConnectableLayer* prevLayer = layer;
394  layer = net.AddOutputLayer(0, "output layer");
395  CHECK(layer);
396 
397  prevLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
398 }
399 
400 TEST_CASE("Network_AddQuantize")
401 {
402  struct Test : public armnn::IStrategy
403  {
404  void ExecuteStrategy(const armnn::IConnectableLayer* layer,
405  const armnn::BaseDescriptor& descriptor,
406  const std::vector<armnn::ConstTensor>& constants,
407  const char* name,
408  const armnn::LayerBindingId id = 0) override
409  {
410  armnn::IgnoreUnused(descriptor, constants, id);
411  switch (layer->GetType())
412  {
413  case armnn::LayerType::Input: break;
414  case armnn::LayerType::Output: break;
416  {
417  m_Visited = true;
418 
419  CHECK(layer);
420 
421  std::string expectedName = std::string("quantize");
422  CHECK(std::string(layer->GetName()) == expectedName);
423  CHECK(std::string(name) == expectedName);
424 
425  CHECK(layer->GetNumInputSlots() == 1);
426  CHECK(layer->GetNumOutputSlots() == 1);
427 
428  const armnn::TensorInfo& infoIn = layer->GetInputSlot(0).GetConnection()->GetTensorInfo();
429  CHECK((infoIn.GetDataType() == armnn::DataType::Float32));
430 
431  const armnn::TensorInfo& infoOut = layer->GetOutputSlot(0).GetTensorInfo();
432  CHECK((infoOut.GetDataType() == armnn::DataType::QAsymmU8));
433  break;
434  }
435  default:
436  {
437  // nothing
438  }
439  }
440  }
441 
442  bool m_Visited = false;
443  };
444 
445 
446  auto graph = armnn::INetwork::Create();
447 
448  auto input = graph->AddInputLayer(0, "input");
449  auto quantize = graph->AddQuantizeLayer("quantize");
450  auto output = graph->AddOutputLayer(1, "output");
451 
452  input->GetOutputSlot(0).Connect(quantize->GetInputSlot(0));
453  quantize->GetOutputSlot(0).Connect(output->GetInputSlot(0));
454 
456  input->GetOutputSlot(0).SetTensorInfo(infoIn);
457 
459  quantize->GetOutputSlot(0).SetTensorInfo(infoOut);
460 
461  Test testQuantize;
462  graph->ExecuteStrategy(testQuantize);
463 
464  CHECK(testQuantize.m_Visited == true);
465 
466 }
467 
468 TEST_CASE("Network_AddMerge")
469 {
470  struct Test : public armnn::IStrategy
471  {
472  void ExecuteStrategy(const armnn::IConnectableLayer* layer,
473  const armnn::BaseDescriptor& descriptor,
474  const std::vector<armnn::ConstTensor>& constants,
475  const char* name,
476  const armnn::LayerBindingId id = 0) override
477  {
478  armnn::IgnoreUnused(descriptor, constants, id);
479  switch (layer->GetType())
480  {
481  case armnn::LayerType::Input: break;
482  case armnn::LayerType::Output: break;
484  {
485  m_Visited = true;
486 
487  CHECK(layer);
488 
489  std::string expectedName = std::string("merge");
490  CHECK(std::string(layer->GetName()) == expectedName);
491  CHECK(std::string(name) == expectedName);
492 
493  CHECK(layer->GetNumInputSlots() == 2);
494  CHECK(layer->GetNumOutputSlots() == 1);
495 
496  const armnn::TensorInfo& infoIn0 = layer->GetInputSlot(0).GetConnection()->GetTensorInfo();
497  CHECK((infoIn0.GetDataType() == armnn::DataType::Float32));
498 
499  const armnn::TensorInfo& infoIn1 = layer->GetInputSlot(1).GetConnection()->GetTensorInfo();
500  CHECK((infoIn1.GetDataType() == armnn::DataType::Float32));
501 
502  const armnn::TensorInfo& infoOut = layer->GetOutputSlot(0).GetTensorInfo();
503  CHECK((infoOut.GetDataType() == armnn::DataType::Float32));
504  break;
505  }
506  default:
507  {
508  // nothing
509  }
510  }
511  }
512 
513  bool m_Visited = false;
514  };
515 
517 
518  armnn::IConnectableLayer* input0 = network->AddInputLayer(0);
519  armnn::IConnectableLayer* input1 = network->AddInputLayer(1);
520  armnn::IConnectableLayer* merge = network->AddMergeLayer("merge");
521  armnn::IConnectableLayer* output = network->AddOutputLayer(0);
522 
523  input0->GetOutputSlot(0).Connect(merge->GetInputSlot(0));
524  input1->GetOutputSlot(0).Connect(merge->GetInputSlot(1));
525  merge->GetOutputSlot(0).Connect(output->GetInputSlot(0));
526 
528  input0->GetOutputSlot(0).SetTensorInfo(info);
529  input1->GetOutputSlot(0).SetTensorInfo(info);
530  merge->GetOutputSlot(0).SetTensorInfo(info);
531 
532  Test testMerge;
533  network->ExecuteStrategy(testMerge);
534 
535  CHECK(testMerge.m_Visited == true);
536 }
537 
538 TEST_CASE("StandInLayerNetworkTest")
539 {
540  // Create a simple network with a StandIn some place in it.
541  armnn::NetworkImpl net;
542  auto input = net.AddInputLayer(0);
543 
544  // Add some valid layer.
545  auto floor = net.AddFloorLayer("Floor");
546 
547  // Add a standin layer
548  armnn::StandInDescriptor standInDescriptor;
549  standInDescriptor.m_NumInputs = 1;
550  standInDescriptor.m_NumOutputs = 1;
551  auto standIn = net.AddStandInLayer(standInDescriptor, "StandIn");
552 
553  // Finally the output.
554  auto output = net.AddOutputLayer(0);
555 
556  // Connect up the layers
557  input->GetOutputSlot(0).Connect(floor->GetInputSlot(0));
558 
559  floor->GetOutputSlot(0).Connect(standIn->GetInputSlot(0));
560 
561  standIn->GetOutputSlot(0).Connect(output->GetInputSlot(0));
562 
563  // Check that the layer is there.
564  CHECK(GraphHasNamedLayer(net.GetGraph(), "StandIn"));
565  // Check that it is connected as expected.
566  CHECK(input->GetOutputSlot(0).GetConnection(0) == &floor->GetInputSlot(0));
567  CHECK(floor->GetOutputSlot(0).GetConnection(0) == &standIn->GetInputSlot(0));
568  CHECK(standIn->GetOutputSlot(0).GetConnection(0) == &output->GetInputSlot(0));
569 }
570 
571 TEST_CASE("StandInLayerSingleInputMultipleOutputsNetworkTest")
572 {
573  // Another test with one input and two outputs on the StandIn layer.
574  armnn::NetworkImpl net;
575 
576  // Create the input.
577  auto input = net.AddInputLayer(0);
578 
579  // Add a standin layer
580  armnn::StandInDescriptor standInDescriptor;
581  standInDescriptor.m_NumInputs = 1;
582  standInDescriptor.m_NumOutputs = 2;
583  auto standIn = net.AddStandInLayer(standInDescriptor, "StandIn");
584 
585  // Add two outputs.
586  auto output0 = net.AddOutputLayer(0);
587  auto output1 = net.AddOutputLayer(1);
588 
589  // Connect up the layers
590  input->GetOutputSlot(0).Connect(standIn->GetInputSlot(0));
591 
592  // Connect the two outputs of the Standin to the two outputs.
593  standIn->GetOutputSlot(0).Connect(output0->GetInputSlot(0));
594  standIn->GetOutputSlot(1).Connect(output1->GetInputSlot(0));
595 
596  // Check that the layer is there.
597  CHECK(GraphHasNamedLayer(net.GetGraph(), "StandIn"));
598  // Check that it is connected as expected.
599  CHECK(input->GetOutputSlot(0).GetConnection(0) == &standIn->GetInputSlot(0));
600  CHECK(standIn->GetOutputSlot(0).GetConnection(0) == &output0->GetInputSlot(0));
601  CHECK(standIn->GetOutputSlot(1).GetConnection(0) == &output1->GetInputSlot(0));
602 }
603 
604 TEST_CASE("ObtainConv2DDescriptorFromIConnectableLayer")
605 {
606  armnn::NetworkImpl net;
607 
608  unsigned int dims[] = { 10,1,1,1 };
609  std::vector<float> convWeightsData(10);
610  armnn::ConstTensor weights(armnn::TensorInfo(4, dims, armnn::DataType::Float32, 0.0f, 0, true), convWeightsData);
611 
613  convDesc2d.m_PadLeft = 2;
614  convDesc2d.m_PadRight = 3;
615  convDesc2d.m_PadTop = 4;
616  convDesc2d.m_PadBottom = 5;
617  convDesc2d.m_StrideX = 2;
618  convDesc2d.m_StrideY = 1;
619  convDesc2d.m_DilationX = 3;
620  convDesc2d.m_DilationY = 3;
621  convDesc2d.m_BiasEnabled = false;
624  armnn::IConnectableLayer* const convLayer = net.AddConvolution2dLayer(convDesc2d,
625  weights,
627  "conv layer");
629  CHECK(convLayer);
630 
631  const armnn::BaseDescriptor& descriptor = convLayer->GetParameters();
632  CHECK(descriptor.IsNull() == false);
633  const armnn::Convolution2dDescriptor& originalDescriptor =
634  static_cast<const armnn::Convolution2dDescriptor&>(descriptor);
635  CHECK(originalDescriptor.m_PadLeft == 2);
636  CHECK(originalDescriptor.m_PadRight == 3);
637  CHECK(originalDescriptor.m_PadTop == 4);
638  CHECK(originalDescriptor.m_PadBottom == 5);
639  CHECK(originalDescriptor.m_StrideX == 2);
640  CHECK(originalDescriptor.m_StrideY == 1);
641  CHECK(originalDescriptor.m_DilationX == 3);
642  CHECK(originalDescriptor.m_DilationY == 3);
643  CHECK(originalDescriptor.m_BiasEnabled == false);
644  CHECK(originalDescriptor.m_DataLayout == armnn::DataLayout::NCHW);
645 }
646 
647 TEST_CASE("CheckNullDescriptor")
648 {
649  armnn::NetworkImpl net;
650  armnn::IConnectableLayer* const addLayer = net.AddAdditionLayer();
651 
652  CHECK(addLayer);
653 
654  const armnn::BaseDescriptor& descriptor = addLayer->GetParameters();
655  // additional layer has no descriptor so a NullDescriptor will be returned
656  CHECK(descriptor.IsNull() == true);
657 }
658 
659 }
uint32_t m_PadBottom
Padding bottom value in the height dimension.
bool m_BiasEnabled
Enable/disable bias.
virtual unsigned int GetNumOutputSlots() const =0
Returns the number of connectable output slots.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
A ViewsDescriptor for the SplitterLayer.
Interface for a layer that is connectable to other layers via InputSlots and OutputSlots.
Definition: INetwork.hpp:66
virtual unsigned int GetNumInputSlots() const =0
Returns the number of connectable input slots.
IConnectableLayer * AddAdditionLayer(const char *name=nullptr)
Definition: Network.cpp:2193
#define ARMNN_NO_DEPRECATE_WARN_BEGIN
Definition: Deprecated.hpp:33
IConnectableLayer * AddPooling2dLayer(const Pooling2dDescriptor &pooling2dDescriptor, const char *name=nullptr)
Definition: Network.cpp:2135
A Convolution2dDescriptor for the Convolution2dLayer.
virtual void ExecuteStrategy(const armnn::IConnectableLayer *layer, const armnn::BaseDescriptor &descriptor, const std::vector< armnn::ConstTensor > &constants, const char *name, const armnn::LayerBindingId id=0)=0
IConnectableLayer * AddConstantLayer(const ConstTensor &input, const char *name=nullptr)
Definition: Network.cpp:2264
IConnectableLayer * AddNormalizationLayer(const NormalizationDescriptor &normalizationDescriptor, const char *name=nullptr)
Definition: Network.cpp:2159
Status PrintGraph()
Definition: Network.cpp:1897
bool GraphHasNamedLayer(const armnn::Graph &graph, const std::string &name)
Definition: GraphUtils.cpp:10
IConnectableLayer * AddConvolution2dLayer(const Convolution2dDescriptor &convolution2dDescriptor, const char *name=nullptr)
Definition: Network.cpp:2030
uint32_t m_PadRight
Padding right value in the width dimension.
IConnectableLayer * AddActivationLayer(const ActivationDescriptor &activationDescriptor, const char *name=nullptr)
Definition: Network.cpp:2147
virtual const BaseDescriptor & GetParameters() const =0
If the layer has a descriptor return it.
void IgnoreUnused(Ts &&...)
virtual const IInputSlot * GetConnection(unsigned int index) const =0
uint32_t m_DilationY
Dilation along y axis.
Private implementation of INetwork.
Definition: Network.hpp:31
int LayerBindingId
Type of identifiers for bindable layers (inputs, outputs).
Definition: Types.hpp:290
virtual void SetTensorInfo(const TensorInfo &tensorInfo)=0
uint32_t m_NumOutputs
Number of output tensors.
Base class for all descriptors.
Definition: Descriptors.hpp:22
IConnectableLayer * AddFloorLayer(const char *name=nullptr)
Definition: Network.cpp:2291
uint32_t m_PadTop
Padding top value in the height dimension.
IConnectableLayer * AddMultiplicationLayer(const char *name=nullptr)
Definition: Network.cpp:2198
TEST_SUITE("Network")
uint32_t m_StrideX
Stride value when proceeding through input for the width dimension.
IConnectableLayer * AddInputLayer(LayerBindingId id, const char *name=nullptr)
Definition: Network.cpp:1903
#define ARMNN_NO_DEPRECATE_WARN_END
Definition: Deprecated.hpp:34
DataType GetDataType() const
Definition: Tensor.hpp:198
An OriginsDescriptor for the ConcatLayer.
A FullyConnectedDescriptor for the FullyConnectedLayer.
virtual LayerGuid GetGuid() const =0
Returns the unique id of the layer.
A tensor defined by a TensorInfo (shape and data type) and an immutable backing store.
Definition: Tensor.hpp:327
IConnectableLayer * AddOutputLayer(LayerBindingId id, const char *name=nullptr)
Definition: Network.cpp:2203
IConnectableLayer * AddConcatLayer(const ConcatDescriptor &concatDescriptor, const char *name=nullptr)
Definition: Network.cpp:2024
A StandInDescriptor for the StandIn layer.
An ActivationDescriptor for the ActivationLayer.
Definition: Descriptors.hpp:36
uint32_t m_StrideY
Stride value when proceeding through input for the height dimension.
IConnectableLayer * AddStandInLayer(const StandInDescriptor &descriptor, const char *name=nullptr)
Definition: Network.cpp:2534
virtual LayerType GetType() const =0
Returns the armnn::LayerType of this layer.
uint32_t m_DilationX
Dilation along x axis.
arm::pipe::ProfilingGuid LayerGuid
Define LayerGuid type.
Definition: Types.hpp:26
EmptyOptional is used to initialize the Optional class in case we want to have default value for an O...
Definition: Optional.hpp:32
IConnectableLayer * AddFullyConnectedLayer(const FullyConnectedDescriptor &fullyConnectedDescriptor, const char *name=nullptr)
Definition: Network.cpp:1942
IConnectableLayer * AddSoftmaxLayer(const SoftmaxDescriptor &softmaxDescriptor, const char *name=nullptr)
Definition: Network.cpp:2171
uint32_t m_NumInputs
Number of input tensors.
const Graph & GetGraph() const
Definition: Network.hpp:37
virtual const IInputSlot & GetInputSlot(unsigned int index) const =0
Get a const input slot handle by slot index.
virtual const IOutputSlot * GetConnection() const =0
virtual const TensorInfo & GetTensorInfo() const =0
virtual const IOutputSlot & GetOutputSlot(unsigned int index) const =0
Get the const output slot handle by slot index.
virtual unsigned int GetNumConnections() const =0
virtual const char * GetName() const =0
Returns the name of the layer.
std::unique_ptr< INetwork, void(*)(INetwork *network)> INetworkPtr
Definition: INetwork.hpp:241
IConnectableLayer * AddBatchNormalizationLayer(const BatchNormalizationDescriptor &desc, const ConstTensor &mean, const ConstTensor &variance, const ConstTensor &beta, const ConstTensor &gamma, const char *name=nullptr)
Definition: Network.cpp:2208
virtual int Connect(IInputSlot &destination)=0
A Pooling2dDescriptor for the Pooling2dLayer.
A NormalizationDescriptor for the NormalizationLayer.
size_t GetNumLayers() const
Definition: Graph.hpp:198
IConnectableLayer * AddSplitterLayer(const ViewsDescriptor &splitterDescriptor, const char *name=nullptr)
Definition: Network.cpp:2177
static INetworkPtr Create(NetworkOptions networkOptions={})
Definition: Network.cpp:476
A SoftmaxDescriptor for the SoftmaxLayer.
virtual bool IsNull() const
Definition: Descriptors.hpp:24
A BatchNormalizationDescriptor for the BatchNormalizationLayer.
uint32_t m_PadLeft
Padding left value in the width dimension.