ArmNN
 21.11
SerializerTests.cpp
Go to the documentation of this file.
1 //
2 // Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
3 // SPDX-License-Identifier: MIT
4 //
5 
#include "../Serializer.hpp"
#include "SerializerTestUtils.hpp"          // NOTE(review): restored — extraction dropped it; provides LayerVerifierBase, SerializeNetwork, DeserializeNetwork, GenerateRandomData

#include <armnn/Descriptors.hpp>
#include <armnn/INetwork.hpp>
#include <armnn/TypesUtils.hpp>
#include <armnn/LstmParams.hpp>
#include <armnn/utility/IgnoreUnused.hpp>   // NOTE(review): restored — armnn::IgnoreUnused is used by ConstantLayerVerifier below

#include <random>
#include <vector>

#include <doctest/doctest.h>
23 
24 TEST_SUITE("SerializerTests")
25 {
26 
27 TEST_CASE("SerializeAddition")
28 {
29  const std::string layerName("addition");
30  const armnn::TensorInfo tensorInfo({1, 2, 3}, armnn::DataType::Float32);
31 
33  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
34  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
35  armnn::IConnectableLayer* const additionLayer = network->AddAdditionLayer(layerName.c_str());
36  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
37 
38  inputLayer0->GetOutputSlot(0).Connect(additionLayer->GetInputSlot(0));
39  inputLayer1->GetOutputSlot(0).Connect(additionLayer->GetInputSlot(1));
40  additionLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
41 
42  inputLayer0->GetOutputSlot(0).SetTensorInfo(tensorInfo);
43  inputLayer1->GetOutputSlot(0).SetTensorInfo(tensorInfo);
44  additionLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
45 
46  std::string serializedNetwork = SerializeNetwork(*network);
47  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(serializedNetwork);
48  CHECK(deserializedNetwork);
49 
50  LayerVerifierBase verifier(layerName, {tensorInfo, tensorInfo}, {tensorInfo});
51  deserializedNetwork->ExecuteStrategy(verifier);
52 }
53 
54 void SerializeArgMinMaxTest(armnn::DataType dataType)
55 {
56  const std::string layerName("argminmax");
57  const armnn::TensorInfo inputInfo({1, 2, 3}, armnn::DataType::Float32);
58  const armnn::TensorInfo outputInfo({1, 3}, dataType);
59 
60  armnn::ArgMinMaxDescriptor descriptor;
62  descriptor.m_Axis = 1;
63 
65  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
66  armnn::IConnectableLayer* const argMinMaxLayer = network->AddArgMinMaxLayer(descriptor, layerName.c_str());
67  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
68 
69  inputLayer->GetOutputSlot(0).Connect(argMinMaxLayer->GetInputSlot(0));
70  argMinMaxLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
71 
72  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
73  argMinMaxLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
74 
75  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
76  CHECK(deserializedNetwork);
77 
79  {inputInfo},
80  {outputInfo},
81  descriptor);
82  deserializedNetwork->ExecuteStrategy(verifier);
83 }
84 
85 TEST_CASE("SerializeArgMinMaxSigned32")
86 {
87  SerializeArgMinMaxTest(armnn::DataType::Signed32);
88 }
89 
90 TEST_CASE("SerializeArgMinMaxSigned64")
91 {
92  SerializeArgMinMaxTest(armnn::DataType::Signed64);
93 }
94 
95 TEST_CASE("SerializeBatchNormalization")
96 {
97  const std::string layerName("batchNormalization");
98  const armnn::TensorInfo inputInfo ({ 1, 3, 3, 1 }, armnn::DataType::Float32);
99  const armnn::TensorInfo outputInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32);
100 
101  const armnn::TensorInfo meanInfo({1}, armnn::DataType::Float32, 0.0f, 0, true);
102  const armnn::TensorInfo varianceInfo({1}, armnn::DataType::Float32, 0.0f, 0, true);
103  const armnn::TensorInfo betaInfo({1}, armnn::DataType::Float32, 0.0f, 0, true);
104  const armnn::TensorInfo gammaInfo({1}, armnn::DataType::Float32, 0.0f, 0, true);
105 
107  descriptor.m_Eps = 0.0010000000475f;
108  descriptor.m_DataLayout = armnn::DataLayout::NHWC;
109 
110  std::vector<float> meanData({5.0});
111  std::vector<float> varianceData({2.0});
112  std::vector<float> betaData({1.0});
113  std::vector<float> gammaData({0.0});
114 
115  std::vector<armnn::ConstTensor> constants;
116  constants.emplace_back(armnn::ConstTensor(meanInfo, meanData));
117  constants.emplace_back(armnn::ConstTensor(varianceInfo, varianceData));
118  constants.emplace_back(armnn::ConstTensor(betaInfo, betaData));
119  constants.emplace_back(armnn::ConstTensor(gammaInfo, gammaData));
120 
122  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
123  armnn::IConnectableLayer* const batchNormalizationLayer =
124  network->AddBatchNormalizationLayer(descriptor,
125  constants[0],
126  constants[1],
127  constants[2],
128  constants[3],
129  layerName.c_str());
130  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
131 
132  inputLayer->GetOutputSlot(0).Connect(batchNormalizationLayer->GetInputSlot(0));
133  batchNormalizationLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
134 
135  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
136  batchNormalizationLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
137 
138  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
139  CHECK(deserializedNetwork);
140 
142  layerName, {inputInfo}, {outputInfo}, descriptor, constants);
143  deserializedNetwork->ExecuteStrategy(verifier);
144 }
145 
146 TEST_CASE("SerializeBatchToSpaceNd")
147 {
148  const std::string layerName("spaceToBatchNd");
149  const armnn::TensorInfo inputInfo({4, 1, 2, 2}, armnn::DataType::Float32);
150  const armnn::TensorInfo outputInfo({1, 1, 4, 4}, armnn::DataType::Float32);
151 
154  desc.m_BlockShape = {2, 2};
155  desc.m_Crops = {{0, 0}, {0, 0}};
156 
158  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
159  armnn::IConnectableLayer* const batchToSpaceNdLayer = network->AddBatchToSpaceNdLayer(desc, layerName.c_str());
160  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
161 
162  inputLayer->GetOutputSlot(0).Connect(batchToSpaceNdLayer->GetInputSlot(0));
163  batchToSpaceNdLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
164 
165  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
166  batchToSpaceNdLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
167 
168  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
169  CHECK(deserializedNetwork);
170 
172  {inputInfo},
173  {outputInfo},
174  desc);
175  deserializedNetwork->ExecuteStrategy(verifier);
176 }
177 
178 TEST_CASE("SerializeCast")
179 {
180  const std::string layerName("cast");
181 
182  const armnn::TensorShape shape{1, 5, 2, 3};
183 
186 
188  armnn::IConnectableLayer* inputLayer = network->AddInputLayer(0);
189  armnn::IConnectableLayer* castLayer = network->AddCastLayer(layerName.c_str());
190  armnn::IConnectableLayer* outputLayer = network->AddOutputLayer(0);
191 
192  inputLayer->GetOutputSlot(0).Connect(castLayer->GetInputSlot(0));
193  castLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
194 
195  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
196  castLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
197 
198  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
199  CHECK(deserializedNetwork);
200 
201  LayerVerifierBase verifier(layerName, {inputInfo}, {outputInfo});
202  deserializedNetwork->ExecuteStrategy(verifier);
203 }
204 
205 TEST_CASE("SerializeChannelShuffle")
206 {
207  const std::string layerName("channelShuffle");
208  const armnn::TensorInfo inputInfo({1, 9}, armnn::DataType::Float32);
209  const armnn::TensorInfo outputInfo({1, 9}, armnn::DataType::Float32);
210 
211  armnn::ChannelShuffleDescriptor descriptor({3, 1});
212 
214  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
215  armnn::IConnectableLayer* const ChannelShuffleLayer =
216  network->AddChannelShuffleLayer(descriptor, layerName.c_str());
217  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
218 
219  inputLayer->GetOutputSlot(0).Connect(ChannelShuffleLayer->GetInputSlot(0));
220  ChannelShuffleLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
221 
222  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
223  ChannelShuffleLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
224 
225  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
226  CHECK(deserializedNetwork);
227 
229  layerName, {inputInfo}, {outputInfo}, descriptor);
230  deserializedNetwork->ExecuteStrategy(verifier);
231 }
232 
233 TEST_CASE("SerializeComparison")
234 {
235  const std::string layerName("comparison");
236 
237  const armnn::TensorShape shape{2, 1, 2, 4};
238 
241 
243 
245  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
246  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
247  armnn::IConnectableLayer* const comparisonLayer = network->AddComparisonLayer(descriptor, layerName.c_str());
248  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
249 
250  inputLayer0->GetOutputSlot(0).Connect(comparisonLayer->GetInputSlot(0));
251  inputLayer1->GetOutputSlot(0).Connect(comparisonLayer->GetInputSlot(1));
252  comparisonLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
253 
254  inputLayer0->GetOutputSlot(0).SetTensorInfo(inputInfo);
255  inputLayer1->GetOutputSlot(0).SetTensorInfo(inputInfo);
256  comparisonLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
257 
258  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
259  CHECK(deserializedNetwork);
260 
262  { inputInfo, inputInfo },
263  { outputInfo },
264  descriptor);
265  deserializedNetwork->ExecuteStrategy(verifier);
266 }
267 
268 TEST_CASE("SerializeConstant")
269 {
270  class ConstantLayerVerifier : public LayerVerifierBase
271  {
272  public:
273  ConstantLayerVerifier(const std::string& layerName,
274  const std::vector<armnn::TensorInfo>& inputInfos,
275  const std::vector<armnn::TensorInfo>& outputInfos,
276  const std::vector<armnn::ConstTensor>& constants)
277  : LayerVerifierBase(layerName, inputInfos, outputInfos)
278  , m_Constants(constants) {}
279 
280  void ExecuteStrategy(const armnn::IConnectableLayer* layer,
281  const armnn::BaseDescriptor& descriptor,
282  const std::vector<armnn::ConstTensor>& constants,
283  const char* name,
284  const armnn::LayerBindingId id = 0) override
285  {
286  armnn::IgnoreUnused(descriptor, id);
287 
288  switch (layer->GetType())
289  {
290  case armnn::LayerType::Input: break;
291  case armnn::LayerType::Output: break;
292  case armnn::LayerType::Addition: break;
293  default:
294  {
295  this->VerifyNameAndConnections(layer, name);
296 
297  for (std::size_t i = 0; i < constants.size(); i++)
298  {
299  CompareConstTensor(constants[i], m_Constants[i]);
300  }
301  }
302  }
303  }
304 
305  private:
306  const std::vector<armnn::ConstTensor> m_Constants;
307  };
308 
309  const std::string layerName("constant");
310  const armnn::TensorInfo info({ 2, 3 }, armnn::DataType::Float32, 0.0f, 0, true);
311 
312  std::vector<float> constantData = GenerateRandomData<float>(info.GetNumElements());
313  armnn::ConstTensor constTensor(info, constantData);
314 
316  armnn::IConnectableLayer* input = network->AddInputLayer(0);
317  armnn::IConnectableLayer* constant = network->AddConstantLayer(constTensor, layerName.c_str());
318  armnn::IConnectableLayer* add = network->AddAdditionLayer();
319  armnn::IConnectableLayer* output = network->AddOutputLayer(0);
320 
321  input->GetOutputSlot(0).Connect(add->GetInputSlot(0));
322  constant->GetOutputSlot(0).Connect(add->GetInputSlot(1));
323  add->GetOutputSlot(0).Connect(output->GetInputSlot(0));
324 
325  input->GetOutputSlot(0).SetTensorInfo(info);
326  constant->GetOutputSlot(0).SetTensorInfo(info);
327  add->GetOutputSlot(0).SetTensorInfo(info);
328 
329  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
330  CHECK(deserializedNetwork);
331 
332  ConstantLayerVerifier verifier(layerName, {}, {info}, {constTensor});
333  deserializedNetwork->ExecuteStrategy(verifier);
334 }
335 
336 TEST_CASE("SerializeConvolution2d")
337 {
338  const std::string layerName("convolution2d");
339  const armnn::TensorInfo inputInfo ({ 1, 5, 5, 1 }, armnn::DataType::Float32);
340  const armnn::TensorInfo outputInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32);
341 
342  const armnn::TensorInfo weightsInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32, 0.0f, 0, true);
343  const armnn::TensorInfo biasesInfo ({ 1 }, armnn::DataType::Float32, 0.0f, 0, true);
344 
345  std::vector<float> weightsData = GenerateRandomData<float>(weightsInfo.GetNumElements());
346  armnn::ConstTensor weights(weightsInfo, weightsData);
347 
348  std::vector<float> biasesData = GenerateRandomData<float>(biasesInfo.GetNumElements());
349  armnn::ConstTensor biases(biasesInfo, biasesData);
350 
352  descriptor.m_PadLeft = 1;
353  descriptor.m_PadRight = 1;
354  descriptor.m_PadTop = 1;
355  descriptor.m_PadBottom = 1;
356  descriptor.m_StrideX = 2;
357  descriptor.m_StrideY = 2;
358  descriptor.m_DilationX = 2;
359  descriptor.m_DilationY = 2;
360  descriptor.m_BiasEnabled = true;
362 
364  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
365  armnn::IConnectableLayer* const convLayer =
366  network->AddConvolution2dLayer(descriptor,
367  weights,
369  layerName.c_str());
370  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
371 
372  inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
373  convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
374 
375  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
376  convLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
377 
378  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
379  CHECK(deserializedNetwork);
380 
381  const std::vector<armnn::ConstTensor>& constants {weights, biases};
383  layerName, {inputInfo}, {outputInfo}, descriptor, constants);
384  deserializedNetwork->ExecuteStrategy(verifier);
385 }
386 
387 TEST_CASE("SerializeConvolution2dWithPerAxisParams")
388 {
389  using namespace armnn;
390 
391  const std::string layerName("convolution2dWithPerAxis");
392  const TensorInfo inputInfo ({ 1, 3, 1, 2 }, DataType::QAsymmU8, 0.55f, 128);
393  const TensorInfo outputInfo({ 1, 3, 1, 3 }, DataType::QAsymmU8, 0.75f, 128);
394 
395  const std::vector<float> quantScales{ 0.75f, 0.65f, 0.85f };
396  constexpr unsigned int quantDimension = 0;
397 
398  const TensorInfo kernelInfo({ 3, 1, 1, 2 }, DataType::QSymmS8, quantScales, quantDimension, true);
399 
400  const std::vector<float> biasQuantScales{ 0.25f, 0.50f, 0.75f };
401  const TensorInfo biasInfo({ 3 }, DataType::Signed32, biasQuantScales, quantDimension, true);
402 
403  std::vector<int8_t> kernelData = GenerateRandomData<int8_t>(kernelInfo.GetNumElements());
404  armnn::ConstTensor weights(kernelInfo, kernelData);
405  std::vector<int32_t> biasData = GenerateRandomData<int32_t>(biasInfo.GetNumElements());
406  armnn::ConstTensor biases(biasInfo, biasData);
407 
408  Convolution2dDescriptor descriptor;
409  descriptor.m_StrideX = 1;
410  descriptor.m_StrideY = 1;
411  descriptor.m_PadLeft = 0;
412  descriptor.m_PadRight = 0;
413  descriptor.m_PadTop = 0;
414  descriptor.m_PadBottom = 0;
415  descriptor.m_BiasEnabled = true;
417 
419  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
420  armnn::IConnectableLayer* const convLayer =
421  network->AddConvolution2dLayer(descriptor,
422  weights,
424  layerName.c_str());
425  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
426 
427  inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
428  convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
429 
430  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
431  convLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
432 
433  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
434  CHECK(deserializedNetwork);
435 
436  const std::vector<armnn::ConstTensor>& constants {weights, biases};
438  layerName, {inputInfo}, {outputInfo}, descriptor, constants);
439  deserializedNetwork->ExecuteStrategy(verifier);
440 }
441 
442 TEST_CASE("SerializeConvolution3d")
443 {
444  const std::string layerName("convolution3d");
445  const armnn::TensorInfo inputInfo ({ 1, 5, 5, 5, 1 }, armnn::DataType::Float32);
446  const armnn::TensorInfo outputInfo({ 1, 2, 2, 2, 1 }, armnn::DataType::Float32);
447 
448  const armnn::TensorInfo weightsInfo({ 3, 3, 3, 1, 1 }, armnn::DataType::Float32, 0.0f, 0, true);
449  const armnn::TensorInfo biasesInfo ({ 1 }, armnn::DataType::Float32, 0.0f, 0, true);
450 
451  std::vector<float> weightsData = GenerateRandomData<float>(weightsInfo.GetNumElements());
452  armnn::ConstTensor weights(weightsInfo, weightsData);
453 
454  std::vector<float> biasesData = GenerateRandomData<float>(biasesInfo.GetNumElements());
455  armnn::ConstTensor biases(biasesInfo, biasesData);
456 
458  descriptor.m_PadLeft = 0;
459  descriptor.m_PadRight = 0;
460  descriptor.m_PadTop = 0;
461  descriptor.m_PadBottom = 0;
462  descriptor.m_PadFront = 0;
463  descriptor.m_PadBack = 0;
464  descriptor.m_DilationX = 1;
465  descriptor.m_DilationY = 1;
466  descriptor.m_DilationZ = 1;
467  descriptor.m_StrideX = 2;
468  descriptor.m_StrideY = 2;
469  descriptor.m_StrideZ = 2;
470  descriptor.m_BiasEnabled = true;
472 
474  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
475  armnn::IConnectableLayer* const weightsLayer = network->AddConstantLayer(weights, "Weights");
476  armnn::IConnectableLayer* const biasesLayer = network->AddConstantLayer(biases, "Biases");
477  armnn::IConnectableLayer* const convLayer = network->AddConvolution3dLayer(descriptor, layerName.c_str());
478  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
479 
480  inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
481  weightsLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(1));
482  biasesLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(2));
483  convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
484 
485  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
486  weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsInfo);
487  biasesLayer->GetOutputSlot(0).SetTensorInfo(biasesInfo);
488  convLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
489 
490  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
491  CHECK(deserializedNetwork);
492 
494  layerName, {inputInfo, weightsInfo, biasesInfo}, {outputInfo}, descriptor);
495  deserializedNetwork->ExecuteStrategy(verifier);
496 }
497 
498 TEST_CASE("SerializeDepthToSpace")
499 {
500  const std::string layerName("depthToSpace");
501 
502  const armnn::TensorInfo inputInfo ({ 1, 8, 4, 12 }, armnn::DataType::Float32);
503  const armnn::TensorInfo outputInfo({ 1, 16, 8, 3 }, armnn::DataType::Float32);
504 
506  desc.m_BlockSize = 2;
507  desc.m_DataLayout = armnn::DataLayout::NHWC;
508 
510  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
511  armnn::IConnectableLayer* const depthToSpaceLayer = network->AddDepthToSpaceLayer(desc, layerName.c_str());
512  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
513 
514  inputLayer->GetOutputSlot(0).Connect(depthToSpaceLayer->GetInputSlot(0));
515  depthToSpaceLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
516 
517  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
518  depthToSpaceLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
519 
520  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
521  CHECK(deserializedNetwork);
522 
523  LayerVerifierBaseWithDescriptor<armnn::DepthToSpaceDescriptor> verifier(layerName, {inputInfo}, {outputInfo}, desc);
524  deserializedNetwork->ExecuteStrategy(verifier);
525 }
526 
527 TEST_CASE("SerializeDepthwiseConvolution2d")
528 {
529  const std::string layerName("depwiseConvolution2d");
530  const armnn::TensorInfo inputInfo ({ 1, 5, 5, 3 }, armnn::DataType::Float32);
531  const armnn::TensorInfo outputInfo({ 1, 3, 3, 3 }, armnn::DataType::Float32);
532 
533  const armnn::TensorInfo weightsInfo({ 1, 3, 3, 3 }, armnn::DataType::Float32, 0.0f, 0, true);
534  const armnn::TensorInfo biasesInfo ({ 3 }, armnn::DataType::Float32, 0.0f, 0, true);
535 
536  std::vector<float> weightsData = GenerateRandomData<float>(weightsInfo.GetNumElements());
537  armnn::ConstTensor weights(weightsInfo, weightsData);
538 
539  std::vector<int32_t> biasesData = GenerateRandomData<int32_t>(biasesInfo.GetNumElements());
540  armnn::ConstTensor biases(biasesInfo, biasesData);
541 
543  descriptor.m_PadLeft = 1;
544  descriptor.m_PadRight = 1;
545  descriptor.m_PadTop = 1;
546  descriptor.m_PadBottom = 1;
547  descriptor.m_StrideX = 2;
548  descriptor.m_StrideY = 2;
549  descriptor.m_DilationX = 2;
550  descriptor.m_DilationY = 2;
551  descriptor.m_BiasEnabled = true;
553 
555  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
556  armnn::IConnectableLayer* const depthwiseConvLayer =
557  network->AddDepthwiseConvolution2dLayer(descriptor,
558  weights,
560  layerName.c_str());
561  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
562 
563  inputLayer->GetOutputSlot(0).Connect(depthwiseConvLayer->GetInputSlot(0));
564  depthwiseConvLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
565 
566  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
567  depthwiseConvLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
568 
569  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
570  CHECK(deserializedNetwork);
571 
572  const std::vector<armnn::ConstTensor>& constants {weights, biases};
574  layerName, {inputInfo}, {outputInfo}, descriptor, constants);
575  deserializedNetwork->ExecuteStrategy(verifier);
576 }
577 
578 TEST_CASE("SerializeDepthwiseConvolution2dWithPerAxisParams")
579 {
580  using namespace armnn;
581 
582  const std::string layerName("depwiseConvolution2dWithPerAxis");
583  const TensorInfo inputInfo ({ 1, 3, 3, 2 }, DataType::QAsymmU8, 0.55f, 128);
584  const TensorInfo outputInfo({ 1, 2, 2, 4 }, DataType::QAsymmU8, 0.75f, 128);
585 
586  const std::vector<float> quantScales{ 0.75f, 0.80f, 0.90f, 0.95f };
587  const unsigned int quantDimension = 0;
588  TensorInfo kernelInfo({ 2, 2, 2, 2 }, DataType::QSymmS8, quantScales, quantDimension, true);
589 
590  const std::vector<float> biasQuantScales{ 0.25f, 0.35f, 0.45f, 0.55f };
591  constexpr unsigned int biasQuantDimension = 0;
592  TensorInfo biasInfo({ 4 }, DataType::Signed32, biasQuantScales, biasQuantDimension, true);
593 
594  std::vector<int8_t> kernelData = GenerateRandomData<int8_t>(kernelInfo.GetNumElements());
595  armnn::ConstTensor weights(kernelInfo, kernelData);
596  std::vector<int32_t> biasData = GenerateRandomData<int32_t>(biasInfo.GetNumElements());
597  armnn::ConstTensor biases(biasInfo, biasData);
598 
600  descriptor.m_StrideX = 1;
601  descriptor.m_StrideY = 1;
602  descriptor.m_PadLeft = 0;
603  descriptor.m_PadRight = 0;
604  descriptor.m_PadTop = 0;
605  descriptor.m_PadBottom = 0;
606  descriptor.m_DilationX = 1;
607  descriptor.m_DilationY = 1;
608  descriptor.m_BiasEnabled = true;
610 
612  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
613  armnn::IConnectableLayer* const depthwiseConvLayer =
614  network->AddDepthwiseConvolution2dLayer(descriptor,
615  weights,
617  layerName.c_str());
618  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
619 
620  inputLayer->GetOutputSlot(0).Connect(depthwiseConvLayer->GetInputSlot(0));
621  depthwiseConvLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
622 
623  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
624  depthwiseConvLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
625 
626  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
627  CHECK(deserializedNetwork);
628 
629  const std::vector<armnn::ConstTensor>& constants {weights, biases};
631  layerName, {inputInfo}, {outputInfo}, descriptor, constants);
632  deserializedNetwork->ExecuteStrategy(verifier);
633 }
634 
635 TEST_CASE("SerializeDequantize")
636 {
637  const std::string layerName("dequantize");
638  const armnn::TensorInfo inputInfo({ 1, 5, 2, 3 }, armnn::DataType::QAsymmU8, 0.5f, 1);
639  const armnn::TensorInfo outputInfo({ 1, 5, 2, 3 }, armnn::DataType::Float32);
640 
642  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
643  armnn::IConnectableLayer* const dequantizeLayer = network->AddDequantizeLayer(layerName.c_str());
644  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
645 
646  inputLayer->GetOutputSlot(0).Connect(dequantizeLayer->GetInputSlot(0));
647  dequantizeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
648 
649  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
650  dequantizeLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
651 
652  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
653  CHECK(deserializedNetwork);
654 
655  LayerVerifierBase verifier(layerName, {inputInfo}, {outputInfo});
656  deserializedNetwork->ExecuteStrategy(verifier);
657 }
658 
659 TEST_CASE("SerializeDeserializeDetectionPostProcess")
660 {
661  const std::string layerName("detectionPostProcess");
662 
663  const std::vector<armnn::TensorInfo> inputInfos({
666  });
667 
668  const std::vector<armnn::TensorInfo> outputInfos({
673  });
674 
676  descriptor.m_UseRegularNms = true;
677  descriptor.m_MaxDetections = 3;
678  descriptor.m_MaxClassesPerDetection = 1;
679  descriptor.m_DetectionsPerClass =1;
680  descriptor.m_NmsScoreThreshold = 0.0;
681  descriptor.m_NmsIouThreshold = 0.5;
682  descriptor.m_NumClasses = 2;
683  descriptor.m_ScaleY = 10.0;
684  descriptor.m_ScaleX = 10.0;
685  descriptor.m_ScaleH = 5.0;
686  descriptor.m_ScaleW = 5.0;
687 
688  const armnn::TensorInfo anchorsInfo({ 6, 4 }, armnn::DataType::Float32, 0.0f, 0, true);
689  const std::vector<float> anchorsData({
690  0.5f, 0.5f, 1.0f, 1.0f,
691  0.5f, 0.5f, 1.0f, 1.0f,
692  0.5f, 0.5f, 1.0f, 1.0f,
693  0.5f, 10.5f, 1.0f, 1.0f,
694  0.5f, 10.5f, 1.0f, 1.0f,
695  0.5f, 100.5f, 1.0f, 1.0f
696  });
697  armnn::ConstTensor anchors(anchorsInfo, anchorsData);
698 
700  armnn::IConnectableLayer* const detectionLayer =
701  network->AddDetectionPostProcessLayer(descriptor, anchors, layerName.c_str());
702 
703  for (unsigned int i = 0; i < 2; i++)
704  {
705  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(static_cast<int>(i));
706  inputLayer->GetOutputSlot(0).Connect(detectionLayer->GetInputSlot(i));
707  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfos[i]);
708  }
709 
710  for (unsigned int i = 0; i < 4; i++)
711  {
712  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(static_cast<int>(i));
713  detectionLayer->GetOutputSlot(i).Connect(outputLayer->GetInputSlot(0));
714  detectionLayer->GetOutputSlot(i).SetTensorInfo(outputInfos[i]);
715  }
716 
717  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
718  CHECK(deserializedNetwork);
719 
720  const std::vector<armnn::ConstTensor>& constants {anchors};
722  layerName, inputInfos, outputInfos, descriptor, constants);
723  deserializedNetwork->ExecuteStrategy(verifier);
724 }
725 
726 TEST_CASE("SerializeDivision")
727 {
728  const std::string layerName("division");
729  const armnn::TensorInfo info({ 1, 5, 2, 3 }, armnn::DataType::Float32);
730 
732  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
733  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
734  armnn::IConnectableLayer* const divisionLayer = network->AddDivisionLayer(layerName.c_str());
735  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
736 
737  inputLayer0->GetOutputSlot(0).Connect(divisionLayer->GetInputSlot(0));
738  inputLayer1->GetOutputSlot(0).Connect(divisionLayer->GetInputSlot(1));
739  divisionLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
740 
741  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
742  inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
743  divisionLayer->GetOutputSlot(0).SetTensorInfo(info);
744 
745  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
746  CHECK(deserializedNetwork);
747 
748  LayerVerifierBase verifier(layerName, {info, info}, {info});
749  deserializedNetwork->ExecuteStrategy(verifier);
750 }
751 
752 TEST_CASE("SerializeDeserializeComparisonEqual")
753 {
754  const std::string layerName("EqualLayer");
755  const armnn::TensorInfo inputTensorInfo1 = armnn::TensorInfo({2, 1, 2, 4}, armnn::DataType::Float32);
756  const armnn::TensorInfo inputTensorInfo2 = armnn::TensorInfo({2, 1, 2, 4}, armnn::DataType::Float32);
757  const armnn::TensorInfo outputTensorInfo = armnn::TensorInfo({2, 1, 2, 4}, armnn::DataType::Boolean);
758 
760  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(0);
761  armnn::IConnectableLayer* const inputLayer2 = network->AddInputLayer(1);
763  armnn::IConnectableLayer* const equalLayer = network->AddComparisonLayer(equalDescriptor, layerName.c_str());
764  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
765 
766  inputLayer1->GetOutputSlot(0).Connect(equalLayer->GetInputSlot(0));
767  inputLayer1->GetOutputSlot(0).SetTensorInfo(inputTensorInfo1);
768  inputLayer2->GetOutputSlot(0).Connect(equalLayer->GetInputSlot(1));
769  inputLayer2->GetOutputSlot(0).SetTensorInfo(inputTensorInfo2);
770  equalLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
771  equalLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
772 
773  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
774  CHECK(deserializedNetwork);
775 
776  LayerVerifierBase verifier(layerName, {inputTensorInfo1, inputTensorInfo2}, {outputTensorInfo});
777  deserializedNetwork->ExecuteStrategy(verifier);
778 }
779 
780 void SerializeElementwiseUnaryTest(armnn::UnaryOperation unaryOperation)
781 {
782  auto layerName = GetUnaryOperationAsCString(unaryOperation);
783 
784  const armnn::TensorShape shape{2, 1, 2, 2};
785 
788 
789  armnn::ElementwiseUnaryDescriptor descriptor(unaryOperation);
790 
792  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
793  armnn::IConnectableLayer* const elementwiseUnaryLayer =
794  network->AddElementwiseUnaryLayer(descriptor, layerName);
795  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
796 
797  inputLayer->GetOutputSlot(0).Connect(elementwiseUnaryLayer->GetInputSlot(0));
798  elementwiseUnaryLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
799 
800  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
801  elementwiseUnaryLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
802 
803  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
804 
805  CHECK(deserializedNetwork);
806 
808  verifier(layerName, { inputInfo }, { outputInfo }, descriptor);
809 
810  deserializedNetwork->ExecuteStrategy(verifier);
811 }
812 
813 TEST_CASE("SerializeElementwiseUnary")
814 {
815  using op = armnn::UnaryOperation;
816  std::initializer_list<op> allUnaryOperations = {op::Abs, op::Exp, op::Sqrt, op::Rsqrt, op::Neg,
817  op::LogicalNot, op::Log, op::Sin};
818 
819  for (auto unaryOperation : allUnaryOperations)
820  {
821  SerializeElementwiseUnaryTest(unaryOperation);
822  }
823 }
824 
// Checks that a Fill layer (fill value 1.0f) survives a serialize/deserialize
// round trip, including its descriptor.
TEST_CASE("SerializeFill")
{
    const std::string layerName("fill");
    const armnn::TensorInfo inputInfo({4}, armnn::DataType::Signed32);
    const armnn::TensorInfo outputInfo({1, 3, 3, 1}, armnn::DataType::Float32);

    armnn::FillDescriptor descriptor(1.0f);

    // NOTE(review): the creation of 'network' (an INetworkPtr) is missing here
    // (extraction gap); it is used below.
    armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
    armnn::IConnectableLayer* const fillLayer = network->AddFillLayer(descriptor, layerName.c_str());
    armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);

    // Wire input -> fill -> output and set slot tensor infos.
    inputLayer->GetOutputSlot(0).Connect(fillLayer->GetInputSlot(0));
    fillLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
    fillLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
    CHECK(deserializedNetwork);

    // Verify layer name, connections and descriptor on the deserialized network.
    LayerVerifierBaseWithDescriptor<armnn::FillDescriptor> verifier(layerName, {inputInfo}, {outputInfo}, descriptor);

    deserializedNetwork->ExecuteStrategy(verifier);
}
851 
// Checks that a Floor layer survives a serialize/deserialize round trip.
TEST_CASE("SerializeFloor")
{
    const std::string layerName("floor");
    // NOTE(review): the TensorInfo 'info' declaration and the creation of
    // 'network' are missing at this point (lost in extraction); both are used below.

    armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
    armnn::IConnectableLayer* const floorLayer = network->AddFloorLayer(layerName.c_str());
    armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);

    // input -> floor -> output; the same 'info' is used on both slots.
    inputLayer->GetOutputSlot(0).Connect(floorLayer->GetInputSlot(0));
    floorLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    inputLayer->GetOutputSlot(0).SetTensorInfo(info);
    floorLayer->GetOutputSlot(0).SetTensorInfo(info);

    armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
    CHECK(deserializedNetwork);

    // Verify the deserialized layer's name and connections.
    LayerVerifierBase verifier(layerName, {info}, {info});
    deserializedNetwork->ExecuteStrategy(verifier);
}
874 
876 class FullyConnectedLayerVerifier : public LayerVerifierBaseWithDescriptor<FullyConnectedDescriptor>
877 {
878 public:
879  FullyConnectedLayerVerifier(const std::string& layerName,
880  const std::vector<armnn::TensorInfo>& inputInfos,
881  const std::vector<armnn::TensorInfo>& outputInfos,
882  const FullyConnectedDescriptor& descriptor)
883  : LayerVerifierBaseWithDescriptor<FullyConnectedDescriptor>(layerName, inputInfos, outputInfos, descriptor) {}
884 
885  void ExecuteStrategy(const armnn::IConnectableLayer* layer,
886  const armnn::BaseDescriptor& descriptor,
887  const std::vector<armnn::ConstTensor>& constants,
888  const char* name,
889  const armnn::LayerBindingId id = 0) override
890  {
891  armnn::IgnoreUnused(constants, id);
892  switch (layer->GetType())
893  {
894  case armnn::LayerType::Input: break;
895  case armnn::LayerType::Output: break;
896  case armnn::LayerType::Constant: break;
897  default:
898  {
899  VerifyNameAndConnections(layer, name);
900  const FullyConnectedDescriptor& layerDescriptor =
901  static_cast<const FullyConnectedDescriptor&>(descriptor);
902  CHECK(layerDescriptor.m_ConstantWeights == m_Descriptor.m_ConstantWeights);
903  CHECK(layerDescriptor.m_BiasEnabled == m_Descriptor.m_BiasEnabled);
904  CHECK(layerDescriptor.m_TransposeWeightMatrix == m_Descriptor.m_TransposeWeightMatrix);
905  }
906  }
907  }
908 };
909 
// Round-trip test for a FullyConnected layer created through the overload that
// takes the weights as ConstTensors directly ("old way", see comment below).
TEST_CASE("SerializeFullyConnected")
{
    const std::string layerName("fullyConnected");
    const armnn::TensorInfo inputInfo ({ 2, 5, 1, 1 }, armnn::DataType::Float32);
    const armnn::TensorInfo outputInfo({ 2, 3 }, armnn::DataType::Float32);

    const armnn::TensorInfo weightsInfo({ 5, 3 }, armnn::DataType::Float32, 0.0f, 0, true);
    const armnn::TensorInfo biasesInfo ({ 3 }, armnn::DataType::Float32, 0.0f, 0, true);
    std::vector<float> weightsData = GenerateRandomData<float>(weightsInfo.GetNumElements());
    std::vector<float> biasesData = GenerateRandomData<float>(biasesInfo.GetNumElements());
    armnn::ConstTensor weights(weightsInfo, weightsData);
    armnn::ConstTensor biases(biasesInfo, biasesData);

    // NOTE(review): the FullyConnectedDescriptor declaration line is missing here
    // (extraction gap); its fields are set below.
    descriptor.m_BiasEnabled = true;
    descriptor.m_TransposeWeightMatrix = false;
    descriptor.m_ConstantWeights = true;

    // NOTE(review): the creation of 'network' is missing here (extraction gap).
    armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);

    // Old way of handling constant tensors.
    // NOTE(review): this call appears to be missing an argument line between
    // 'weights' and the layer name (extraction gap) — 'biases' is otherwise unused;
    // confirm against the original file.
    armnn::IConnectableLayer* const fullyConnectedLayer =
        network->AddFullyConnectedLayer(descriptor,
                                        weights,
                                        layerName.c_str());

    armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);

    inputLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(0));
    fullyConnectedLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
    fullyConnectedLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
    CHECK(deserializedNetwork);

    // The custom verifier checks the descriptor flags in addition to name/connections.
    FullyConnectedLayerVerifier verifier(layerName, {inputInfo, weightsInfo, biasesInfo}, {outputInfo}, descriptor);
    deserializedNetwork->ExecuteStrategy(verifier);
}
954 
// Round-trip test for a FullyConnected layer whose weights and biases arrive as
// additional network inputs (m_ConstantWeights = false) rather than as constants.
TEST_CASE("SerializeFullyConnectedWeightsAndBiasesAsInputs")
{
    const std::string layerName("fullyConnected_weights_as_inputs");
    const armnn::TensorInfo inputInfo ({ 2, 5, 1, 1 }, armnn::DataType::Float32);
    const armnn::TensorInfo outputInfo({ 2, 3 }, armnn::DataType::Float32);

    const armnn::TensorInfo weightsInfo({ 5, 3 }, armnn::DataType::Float32);
    const armnn::TensorInfo biasesInfo ({ 3 }, armnn::DataType::Float32);

    // NOTE(review): one or more lines are missing here, including the
    // FullyConnectedDescriptor declaration (extraction gap); its fields are set below.
    descriptor.m_BiasEnabled = true;
    descriptor.m_TransposeWeightMatrix = false;
    descriptor.m_ConstantWeights = false;

    // NOTE(review): the creation of 'network' is missing here (extraction gap).
    armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
    armnn::IConnectableLayer* const weightsInputLayer = network->AddInputLayer(1);
    armnn::IConnectableLayer* const biasInputLayer = network->AddInputLayer(2);
    armnn::IConnectableLayer* const fullyConnectedLayer =
        network->AddFullyConnectedLayer(descriptor,
                                        layerName.c_str());
    armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);

    // Slot 0 = data, slot 1 = weights, slot 2 = biases.
    inputLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(0));
    weightsInputLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(1));
    biasInputLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(2));
    fullyConnectedLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
    weightsInputLayer->GetOutputSlot(0).SetTensorInfo(weightsInfo);
    biasInputLayer->GetOutputSlot(0).SetTensorInfo(biasesInfo);
    fullyConnectedLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
    CHECK(deserializedNetwork);

    // No constants are expected since weights/biases are network inputs.
    const std::vector<armnn::ConstTensor> constants {};
    // NOTE(review): the line declaring 'verifier' (constant-aware layer verifier)
    // is missing here (extraction gap).
        layerName, {inputInfo, weightsInfo, biasesInfo}, {outputInfo}, descriptor, constants);
    deserializedNetwork->ExecuteStrategy(verifier);
}
999 
// Round-trip test for a FullyConnected layer fed by dedicated Constant layers
// for weights and biases (the current way of supplying constant tensors).
TEST_CASE("SerializeFullyConnectedWeightsAndBiasesAsConstantLayers")
{
    // NOTE(review): this string duplicates the previous test's layer name
    // ("fullyConnected_weights_as_inputs") — likely a copy-paste leftover;
    // harmless to the test but misleading when reading failures.
    const std::string layerName("fullyConnected_weights_as_inputs");
    const armnn::TensorInfo inputInfo ({ 2, 5, 1, 1 }, armnn::DataType::Float32);
    const armnn::TensorInfo outputInfo({ 2, 3 }, armnn::DataType::Float32);

    const armnn::TensorInfo weightsInfo({ 5, 3 }, armnn::DataType::Float32, 0.0f, 0, true);
    const armnn::TensorInfo biasesInfo ({ 3 }, armnn::DataType::Float32, 0.0f, 0, true);

    std::vector<float> weightsData = GenerateRandomData<float>(weightsInfo.GetNumElements());
    std::vector<float> biasesData = GenerateRandomData<float>(biasesInfo.GetNumElements());
    armnn::ConstTensor weights(weightsInfo, weightsData);
    armnn::ConstTensor biases(biasesInfo, biasesData);

    // NOTE(review): the FullyConnectedDescriptor declaration line is missing here
    // (extraction gap); its fields are set below.
    descriptor.m_BiasEnabled = true;
    descriptor.m_TransposeWeightMatrix = false;
    descriptor.m_ConstantWeights = true;

    // NOTE(review): the creation of 'network' is missing here (extraction gap).
    armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
    armnn::IConnectableLayer* const weightsLayer = network->AddConstantLayer(weights, "Weights");
    armnn::IConnectableLayer* const biasesLayer = network->AddConstantLayer(biases, "Biases");
    armnn::IConnectableLayer* const fullyConnectedLayer = network->AddFullyConnectedLayer(descriptor,layerName.c_str());
    armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);

    // Slot 0 = data, slot 1 = weights constant, slot 2 = biases constant.
    inputLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(0));
    weightsLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(1));
    biasesLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(2));
    fullyConnectedLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
    weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsInfo);
    biasesLayer->GetOutputSlot(0).SetTensorInfo(biasesInfo);
    fullyConnectedLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
    CHECK(deserializedNetwork);

    FullyConnectedLayerVerifier verifier(layerName, {inputInfo, weightsInfo, biasesInfo}, {outputInfo}, descriptor);
    deserializedNetwork->ExecuteStrategy(verifier);
}
1042 
// Round-trip test for a Gather layer whose indices come from a Constant layer.
TEST_CASE("SerializeGather")
{
    // NOTE(review): a line is missing just before this local class (extraction
    // gap); GatherDescriptor is used unqualified below, so it was presumably a
    // using-declaration — confirm against the original file.
    // Local verifier: checks the Gather layer's name, connections and axis.
    class GatherLayerVerifier : public LayerVerifierBaseWithDescriptor<GatherDescriptor>
    {
    public:
        GatherLayerVerifier(const std::string& layerName,
                            const std::vector<armnn::TensorInfo>& inputInfos,
                            const std::vector<armnn::TensorInfo>& outputInfos,
                            const GatherDescriptor& descriptor)
            : LayerVerifierBaseWithDescriptor<GatherDescriptor>(layerName, inputInfos, outputInfos, descriptor) {}

        void ExecuteStrategy(const armnn::IConnectableLayer* layer,
                             const armnn::BaseDescriptor& descriptor,
                             const std::vector<armnn::ConstTensor>& constants,
                             const char* name,
                             const armnn::LayerBindingId id = 0) override
        {
            armnn::IgnoreUnused(constants, id);
            switch (layer->GetType())
            {
                // Input/Output/Constant layers carry no Gather state to verify.
                case armnn::LayerType::Input: break;
                case armnn::LayerType::Output: break;
                case armnn::LayerType::Constant: break;
                default:
                {
                    VerifyNameAndConnections(layer, name);
                    const GatherDescriptor& layerDescriptor = static_cast<const GatherDescriptor&>(descriptor);
                    CHECK(layerDescriptor.m_Axis == m_Descriptor.m_Axis);
                }
            }
        }
    };

    const std::string layerName("gather");
    armnn::TensorInfo paramsInfo({ 8 }, armnn::DataType::QAsymmU8);
    armnn::TensorInfo outputInfo({ 3 }, armnn::DataType::QAsymmU8);
    const armnn::TensorInfo indicesInfo({ 3 }, armnn::DataType::Signed32, 0.0f, 0, true);
    GatherDescriptor descriptor;
    descriptor.m_Axis = 1;

    // Quantization parameters for the QAsymmU8 params/output tensors.
    paramsInfo.SetQuantizationScale(1.0f);
    paramsInfo.SetQuantizationOffset(0);
    outputInfo.SetQuantizationScale(1.0f);
    outputInfo.SetQuantizationOffset(0);

    const std::vector<int32_t>& indicesData = {7, 6, 5};

    // NOTE(review): the creation of 'network' is missing here (extraction gap).
    armnn::IConnectableLayer *const inputLayer = network->AddInputLayer(0);
    armnn::IConnectableLayer *const constantLayer =
        network->AddConstantLayer(armnn::ConstTensor(indicesInfo, indicesData));
    armnn::IConnectableLayer *const gatherLayer = network->AddGatherLayer(descriptor, layerName.c_str());
    armnn::IConnectableLayer *const outputLayer = network->AddOutputLayer(0);

    // Slot 0 = params, slot 1 = indices (from the constant layer).
    inputLayer->GetOutputSlot(0).Connect(gatherLayer->GetInputSlot(0));
    constantLayer->GetOutputSlot(0).Connect(gatherLayer->GetInputSlot(1));
    gatherLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    inputLayer->GetOutputSlot(0).SetTensorInfo(paramsInfo);
    constantLayer->GetOutputSlot(0).SetTensorInfo(indicesInfo);
    gatherLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
    CHECK(deserializedNetwork);

    GatherLayerVerifier verifier(layerName, {paramsInfo, indicesInfo}, {outputInfo}, descriptor);
    deserializedNetwork->ExecuteStrategy(verifier);
}
1112 
1113 
// Round-trip test for a Comparison layer configured as Greater.
TEST_CASE("SerializeComparisonGreater")
{
    const std::string layerName("greater");

    const armnn::TensorShape shape{2, 1, 2, 4};

    // NOTE(review): the declarations of 'inputInfo'/'outputInfo', the creation of
    // 'network', and the declaration of 'greaterDescriptor' (presumably a
    // ComparisonDescriptor, given AddComparisonLayer below) are missing at this
    // point (lost in extraction).

    armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
    armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
    // 'equalLayer' looks like a copy-paste leftover from the Equal test; it
    // actually holds the Greater comparison layer.
    armnn::IConnectableLayer* const equalLayer = network->AddComparisonLayer(greaterDescriptor, layerName.c_str());
    armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);

    // Two inputs -> comparison -> output.
    inputLayer0->GetOutputSlot(0).Connect(equalLayer->GetInputSlot(0));
    inputLayer1->GetOutputSlot(0).Connect(equalLayer->GetInputSlot(1));
    equalLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    inputLayer0->GetOutputSlot(0).SetTensorInfo(inputInfo);
    inputLayer1->GetOutputSlot(0).SetTensorInfo(inputInfo);
    equalLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
    CHECK(deserializedNetwork);

    LayerVerifierBase verifier(layerName, { inputInfo, inputInfo }, { outputInfo });
    deserializedNetwork->ExecuteStrategy(verifier);
}
1144 
1145 
// Round-trip test for an InstanceNormalization layer, checking that gamma, beta,
// epsilon and data layout survive serialization.
TEST_CASE("SerializeInstanceNormalization")
{
    const std::string layerName("instanceNormalization");
    const armnn::TensorInfo info({ 1, 2, 1, 5 }, armnn::DataType::Float32);

    // NOTE(review): the InstanceNormalizationDescriptor declaration line is
    // missing here (extraction gap); its fields are set below.
    descriptor.m_Gamma = 1.1f;
    descriptor.m_Beta = 0.1f;
    descriptor.m_Eps = 0.0001f;
    descriptor.m_DataLayout = armnn::DataLayout::NHWC;

    // NOTE(review): the creation of 'network' is missing here (extraction gap).
    armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
    armnn::IConnectableLayer* const instanceNormLayer =
        network->AddInstanceNormalizationLayer(descriptor, layerName.c_str());
    armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);

    inputLayer->GetOutputSlot(0).Connect(instanceNormLayer->GetInputSlot(0));
    instanceNormLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    inputLayer->GetOutputSlot(0).SetTensorInfo(info);
    instanceNormLayer->GetOutputSlot(0).SetTensorInfo(info);

    armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
    CHECK(deserializedNetwork);

    // NOTE(review): the line declaring 'verifier' (descriptor-aware) is missing
    // here (extraction gap).
        layerName, {info}, {info}, descriptor);
    deserializedNetwork->ExecuteStrategy(verifier);
}
1176 
// Round-trip test for an L2Normalization layer including its epsilon value.
TEST_CASE("SerializeL2Normalization")
{
    const std::string l2NormLayerName("l2Normalization");
    const armnn::TensorInfo info({1, 2, 1, 5}, armnn::DataType::Float32);

    // NOTE(review): the L2NormalizationDescriptor declaration (and possibly a
    // data-layout assignment) is missing here (extraction gap); 'desc' is used below.
    desc.m_Eps = 0.0001f;

    // NOTE(review): the creation of 'network' is missing here (extraction gap).
    armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
    armnn::IConnectableLayer* const l2NormLayer = network->AddL2NormalizationLayer(desc, l2NormLayerName.c_str());
    armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);

    inputLayer0->GetOutputSlot(0).Connect(l2NormLayer->GetInputSlot(0));
    l2NormLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
    l2NormLayer->GetOutputSlot(0).SetTensorInfo(info);

    armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
    CHECK(deserializedNetwork);

    // NOTE(review): the line declaring 'verifier' (descriptor-aware) is missing
    // here (extraction gap).
        l2NormLayerName, {info}, {info}, desc);
    deserializedNetwork->ExecuteStrategy(verifier);
}
1204 
// Backward-compatibility test: deserializes a pre-captured flat buffer of a
// network written before L2Normalization gained an epsilon field, and checks
// that the default epsilon is applied.
TEST_CASE("EnsureL2NormalizationBackwardCompatibility")
{
    // The hex data below is a flat buffer containing a simple network with one input
    // a L2Normalization layer and an output layer with dimensions as per the tensor infos below.
    //
    // This test verifies that we can still read back these old style
    // models without the normalization epsilon value.
    const std::vector<uint8_t> l2NormalizationModel =
    {
        0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00,
        0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
        0x3C, 0x01, 0x00, 0x00, 0x74, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0xE8, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B,
        0x04, 0x00, 0x00, 0x00, 0xD6, 0xFE, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x08, 0x00,
        0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x9E, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00,
        0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x4C, 0xFF, 0xFF, 0xFF, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x44, 0xFF, 0xFF, 0xFF, 0x00, 0x00,
        0x00, 0x20, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00,
        0x20, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x06, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x0E, 0x00, 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x14, 0x00, 0x0E, 0x00, 0x00, 0x00,
        0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x1F, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x20, 0x00,
        0x00, 0x00, 0x0F, 0x00, 0x00, 0x00, 0x6C, 0x32, 0x4E, 0x6F, 0x72, 0x6D, 0x61, 0x6C, 0x69, 0x7A, 0x61, 0x74,
        0x69, 0x6F, 0x6E, 0x00, 0x01, 0x00, 0x00, 0x00, 0x48, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00,
        0x00, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
        0x52, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00,
        0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00,
        0x08, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09,
        0x04, 0x00, 0x00, 0x00, 0xF6, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x0A, 0x00,
        0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x14, 0x00, 0x00, 0x00,
        0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00,
        0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0A, 0x00, 0x00, 0x00,
        0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x08, 0x00,
        0x07, 0x00, 0x0C, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
        0x05, 0x00, 0x00, 0x00, 0x00
    };

    // Deserialize the captured buffer; it must still load successfully.
    armnn::INetworkPtr deserializedNetwork =
        DeserializeNetwork(std::string(l2NormalizationModel.begin(), l2NormalizationModel.end()));
    CHECK(deserializedNetwork);

    const std::string layerName("l2Normalization");
    const armnn::TensorInfo inputInfo = armnn::TensorInfo({1, 2, 1, 5}, armnn::DataType::Float32);

    // NOTE(review): the L2NormalizationDescriptor declaration is missing here
    // (extraction gap); 'desc' is used below.
    // Since this variable does not exist in the l2NormalizationModel dump, the default value will be loaded
    desc.m_Eps = 1e-12f;

    // NOTE(review): the line declaring 'verifier' (descriptor-aware) is missing
    // here (extraction gap).
        layerName, {inputInfo}, {inputInfo}, desc);
    deserializedNetwork->ExecuteStrategy(verifier);
}
1261 
// Round-trip test for a LogicalBinary layer (named "logicalBinaryAnd").
TEST_CASE("SerializeLogicalBinary")
{
    const std::string layerName("logicalBinaryAnd");

    const armnn::TensorShape shape{2, 1, 2, 2};

    // NOTE(review): the declarations of 'inputInfo'/'outputInfo', the
    // LogicalBinaryDescriptor 'descriptor', and the creation of 'network' are
    // missing at this point (lost in extraction); all are used below.

    armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
    armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
    armnn::IConnectableLayer* const logicalBinaryLayer = network->AddLogicalBinaryLayer(descriptor, layerName.c_str());
    armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);

    // Two inputs -> logical binary -> output.
    inputLayer0->GetOutputSlot(0).Connect(logicalBinaryLayer->GetInputSlot(0));
    inputLayer1->GetOutputSlot(0).Connect(logicalBinaryLayer->GetInputSlot(1));
    logicalBinaryLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    inputLayer0->GetOutputSlot(0).SetTensorInfo(inputInfo);
    inputLayer1->GetOutputSlot(0).SetTensorInfo(inputInfo);
    logicalBinaryLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
    CHECK(deserializedNetwork);

    // NOTE(review): the line declaring 'verifier' (descriptor-aware) is missing
    // here (extraction gap).
        layerName, { inputInfo, inputInfo }, { outputInfo }, descriptor);
    deserializedNetwork->ExecuteStrategy(verifier);
}
1294 
// Round-trip test for a LogSoftmax layer with beta = 1 and axis = -1.
TEST_CASE("SerializeLogSoftmax")
{
    const std::string layerName("log_softmax");
    // NOTE(review): the TensorInfo 'info' declaration is missing here
    // (extraction gap); it is used below.

    armnn::LogSoftmaxDescriptor descriptor;
    descriptor.m_Beta = 1.0f;
    descriptor.m_Axis = -1;

    // NOTE(review): the creation of 'network' is missing here (extraction gap).
    armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
    armnn::IConnectableLayer* const logSoftmaxLayer = network->AddLogSoftmaxLayer(descriptor, layerName.c_str());
    armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);

    inputLayer->GetOutputSlot(0).Connect(logSoftmaxLayer->GetInputSlot(0));
    logSoftmaxLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // The same 'info' is used for both slots.
    inputLayer->GetOutputSlot(0).SetTensorInfo(info);
    logSoftmaxLayer->GetOutputSlot(0).SetTensorInfo(info);

    armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
    CHECK(deserializedNetwork);

    LayerVerifierBaseWithDescriptor<armnn::LogSoftmaxDescriptor> verifier(layerName, {info}, {info}, descriptor);
    deserializedNetwork->ExecuteStrategy(verifier);
}
1321 
// Round-trip test for a (descriptor-less) Maximum layer.
TEST_CASE("SerializeMaximum")
{
    const std::string layerName("maximum");
    const armnn::TensorInfo info({ 1, 2, 2, 3 }, armnn::DataType::Float32);

    // NOTE(review): the creation of 'network' is missing here (extraction gap).
    armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
    armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
    armnn::IConnectableLayer* const maximumLayer = network->AddMaximumLayer(layerName.c_str());
    armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);

    // Two inputs -> maximum -> output; the same 'info' is used on every slot.
    inputLayer0->GetOutputSlot(0).Connect(maximumLayer->GetInputSlot(0));
    inputLayer1->GetOutputSlot(0).Connect(maximumLayer->GetInputSlot(1));
    maximumLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
    inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
    maximumLayer->GetOutputSlot(0).SetTensorInfo(info);

    armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
    CHECK(deserializedNetwork);

    LayerVerifierBase verifier(layerName, {info, info}, {info});
    deserializedNetwork->ExecuteStrategy(verifier);
}
1347 
// Round-trip test for a Mean layer reducing over axis 2 with kept dimensions.
TEST_CASE("SerializeMean")
{
    const std::string layerName("mean");
    const armnn::TensorInfo inputInfo({1, 1, 3, 2}, armnn::DataType::Float32);
    const armnn::TensorInfo outputInfo({1, 1, 1, 2}, armnn::DataType::Float32);

    armnn::MeanDescriptor descriptor;
    descriptor.m_Axis = { 2 };
    descriptor.m_KeepDims = true;

    // NOTE(review): the creation of 'network' is missing here (extraction gap).
    armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
    armnn::IConnectableLayer* const meanLayer = network->AddMeanLayer(descriptor, layerName.c_str());
    armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);

    inputLayer->GetOutputSlot(0).Connect(meanLayer->GetInputSlot(0));
    meanLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
    meanLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
    CHECK(deserializedNetwork);

    LayerVerifierBaseWithDescriptor<armnn::MeanDescriptor> verifier(layerName, {inputInfo}, {outputInfo}, descriptor);
    deserializedNetwork->ExecuteStrategy(verifier);
}
1375 
// Round-trip test for a (descriptor-less) Merge layer.
TEST_CASE("SerializeMerge")
{
    const std::string layerName("merge");
    const armnn::TensorInfo info({ 1, 2, 2, 3 }, armnn::DataType::Float32);

    // NOTE(review): the creation of 'network' is missing here (extraction gap).
    armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
    armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
    armnn::IConnectableLayer* const mergeLayer = network->AddMergeLayer(layerName.c_str());
    armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);

    // Two inputs -> merge -> output; the same 'info' is used on every slot.
    inputLayer0->GetOutputSlot(0).Connect(mergeLayer->GetInputSlot(0));
    inputLayer1->GetOutputSlot(0).Connect(mergeLayer->GetInputSlot(1));
    mergeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
    inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
    mergeLayer->GetOutputSlot(0).SetTensorInfo(info);

    armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
    CHECK(deserializedNetwork);

    LayerVerifierBase verifier(layerName, {info, info}, {info});
    deserializedNetwork->ExecuteStrategy(verifier);
}
1401 
// Verifier used by the Merger backward-compatibility test: deserialization must
// translate the deprecated Merger layer into Concat, so the branch that throws
// rejects any surviving Merger layer while the other branch verifies the
// concatenation layer's name, connections and origins descriptor.
class MergerLayerVerifier : public LayerVerifierBaseWithDescriptor<armnn::OriginsDescriptor>
{
public:
    MergerLayerVerifier(const std::string& layerName,
                        const std::vector<armnn::TensorInfo>& inputInfos,
                        const std::vector<armnn::TensorInfo>& outputInfos,
                        const armnn::OriginsDescriptor& descriptor)
        : LayerVerifierBaseWithDescriptor<armnn::OriginsDescriptor>(layerName, inputInfos, outputInfos, descriptor) {}

    void ExecuteStrategy(const armnn::IConnectableLayer* layer,
                         const armnn::BaseDescriptor& descriptor,
                         const std::vector<armnn::ConstTensor>& constants,
                         const char* name,
                         const armnn::LayerBindingId id = 0) override
    {
        armnn::IgnoreUnused(descriptor, constants, id);
        switch (layer->GetType())
        {
            case armnn::LayerType::Input: break;
            case armnn::LayerType::Output: break;
            // NOTE(review): two case labels are missing below (extraction gaps) —
            // from the bodies they appear to be the deprecated Merger case (which
            // must not survive deserialization) followed by the Concat case
            // (which is verified); confirm against the original file.
            {
                throw armnn::Exception("MergerLayer should have translated to ConcatLayer");
                break;
            }
            {
                VerifyNameAndConnections(layer, name);
                const armnn::MergerDescriptor& layerDescriptor =
                    static_cast<const armnn::MergerDescriptor&>(descriptor);
                VerifyDescriptor(layerDescriptor);
                break;
            }
            default:
            {
                throw armnn::Exception("Unexpected layer type in Merge test model");
            }
        }
    }
};
1442 
// Backward-compatibility test: deserializes a pre-captured flat buffer of a
// network containing a deprecated Merger layer and verifies that it loads as a
// Concat layer with an equivalent origins descriptor.
TEST_CASE("EnsureMergerLayerBackwardCompatibility")
{
    // The hex data below is a flat buffer containing a simple network with two inputs
    // a merger layer (now deprecated) and an output layer with dimensions as per the tensor infos below.
    //
    // This test verifies that we can still read back these old style
    // models replacing the MergerLayers with ConcatLayers with the same parameters.
    const std::vector<uint8_t> mergerModel =
    {
        0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00,
        0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
        0x38, 0x02, 0x00, 0x00, 0x8C, 0x01, 0x00, 0x00, 0x70, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x02, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
        0xF4, 0xFD, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B, 0x04, 0x00, 0x00, 0x00, 0x92, 0xFE, 0xFF, 0xFF, 0x04, 0x00,
        0x00, 0x00, 0x9A, 0xFE, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x7E, 0xFE, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00,
        0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0xF8, 0xFE, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x48, 0xFE, 0xFF, 0xFF, 0x00, 0x00,
        0x00, 0x1F, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00,
        0x68, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00,
        0x0C, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
        0x02, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x22, 0xFF, 0xFF, 0xFF, 0x04, 0x00,
        0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x3E, 0xFF, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x36, 0xFF, 0xFF, 0xFF,
        0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x1E, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x1C, 0x00,
        0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x6D, 0x65, 0x72, 0x67, 0x65, 0x72, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
        0x5C, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x34, 0xFF,
        0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x92, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x02, 0x00,
        0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x08, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00,
        0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x00, 0x00,
        0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0E, 0x00,
        0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x06, 0x00, 0x08, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0E, 0x00,
        0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x0E, 0x00, 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x14, 0x00, 0x0E, 0x00, 0x00, 0x00,
        0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
        0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00,
        0x00, 0x00, 0x66, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x04, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00,
        0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09,
        0x04, 0x00, 0x00, 0x00, 0xF6, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x0A, 0x00,
        0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x14, 0x00, 0x00, 0x00,
        0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00,
        0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0A, 0x00, 0x00, 0x00,
        0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x08, 0x00,
        0x07, 0x00, 0x0C, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
        0x02, 0x00, 0x00, 0x00
    };

    // Deserialize the captured buffer; it must still load successfully.
    armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(std::string(mergerModel.begin(), mergerModel.end()));
    CHECK(deserializedNetwork);

    const armnn::TensorInfo inputInfo = armnn::TensorInfo({ 2, 3, 2, 2 }, armnn::DataType::Float32);
    const armnn::TensorInfo outputInfo = armnn::TensorInfo({ 4, 3, 2, 2 }, armnn::DataType::Float32);

    const std::vector<armnn::TensorShape> shapes({inputInfo.GetShape(), inputInfo.GetShape()});

    // Build the origins descriptor the translated Concat layer is expected to carry
    // (concatenation of the two input shapes along axis 0).
    armnn::OriginsDescriptor descriptor =
        armnn::CreateDescriptorForConcatenation(shapes.begin(), shapes.end(), 0);

    // MergerLayerVerifier throws if a Merger layer survived the round trip.
    MergerLayerVerifier verifier("merger", { inputInfo, inputInfo }, { outputInfo }, descriptor);
    deserializedNetwork->ExecuteStrategy(verifier);
}
1511 
// Round-trips a two-input Concat layer (concatenation along axis 0) through
// SerializeNetwork/DeserializeNetwork and verifies the layer name, slot
// connections and OriginsDescriptor survive the round trip.
// NOTE(review): this rendered listing elides original line 1523 (numbering
// jumps 1522 -> 1524 below) — presumably the INetwork creation; confirm
// against the repository source.
1512 TEST_CASE("SerializeConcat")
1513 {
1514  const std::string layerName("concat");
1515  const armnn::TensorInfo inputInfo = armnn::TensorInfo({2, 3, 2, 2}, armnn::DataType::Float32);
1516  const armnn::TensorInfo outputInfo = armnn::TensorInfo({4, 3, 2, 2}, armnn::DataType::Float32);
1517 
1518  const std::vector<armnn::TensorShape> shapes({inputInfo.GetShape(), inputInfo.GetShape()});
1519 
1520  armnn::OriginsDescriptor descriptor =
1521  armnn::CreateDescriptorForConcatenation(shapes.begin(), shapes.end(), 0);
1522 
1524  armnn::IConnectableLayer* const inputLayerOne = network->AddInputLayer(0);
1525  armnn::IConnectableLayer* const inputLayerTwo = network->AddInputLayer(1);
1526  armnn::IConnectableLayer* const concatLayer = network->AddConcatLayer(descriptor, layerName.c_str());
1527  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1528 
// Wire: input0 -> concat slot 0, input1 -> concat slot 1, concat -> output.
1529  inputLayerOne->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(0));
1530  inputLayerTwo->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(1));
1531  concatLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1532 
1533  inputLayerOne->GetOutputSlot(0).SetTensorInfo(inputInfo);
1534  inputLayerTwo->GetOutputSlot(0).SetTensorInfo(inputInfo);
1535  concatLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1536 
1537  std::string concatLayerNetwork = SerializeNetwork(*network);
1538  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(concatLayerNetwork);
1539  CHECK(deserializedNetwork);
1540 
1541  // NOTE: using the MergerLayerVerifier to ensure that it is a concat layer and not a
1542  // merger layer that gets placed into the graph.
1543  MergerLayerVerifier verifier(layerName, {inputInfo, inputInfo}, {outputInfo}, descriptor);
1544  deserializedNetwork->ExecuteStrategy(verifier);
1545 }
1546 
// Round-trips a two-input Minimum layer through the serializer and checks
// name/connection integrity with LayerVerifierBase.
// NOTE(review): original line 1552 (numbering jump 1551 -> 1553) is elided
// in this listing — presumably the INetwork creation; verify in the repo.
1547 TEST_CASE("SerializeMinimum")
1548 {
1549  const std::string layerName("minimum");
1550  const armnn::TensorInfo info({ 1, 2, 2, 3 }, armnn::DataType::Float32);
1551 
1553  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
1554  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
1555  armnn::IConnectableLayer* const minimumLayer = network->AddMinimumLayer(layerName.c_str());
1556  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1557 
1558  inputLayer0->GetOutputSlot(0).Connect(minimumLayer->GetInputSlot(0));
1559  inputLayer1->GetOutputSlot(0).Connect(minimumLayer->GetInputSlot(1));
1560  minimumLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1561 
// All slots share the same tensor info: Minimum is element-wise.
1562  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
1563  inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
1564  minimumLayer->GetOutputSlot(0).SetTensorInfo(info);
1565 
1566  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1567  CHECK(deserializedNetwork);
1568 
1569  LayerVerifierBase verifier(layerName, {info, info}, {info});
1570  deserializedNetwork->ExecuteStrategy(verifier);
1571 }
1572 
// Round-trips a two-input Multiplication layer through the serializer and
// checks name/connection integrity with LayerVerifierBase.
// NOTE(review): original line 1578 (numbering jump 1577 -> 1579) is elided
// in this listing — presumably the INetwork creation; verify in the repo.
1573 TEST_CASE("SerializeMultiplication")
1574 {
1575  const std::string layerName("multiplication");
1576  const armnn::TensorInfo info({ 1, 5, 2, 3 }, armnn::DataType::Float32);
1577 
1579  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
1580  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
1581  armnn::IConnectableLayer* const multiplicationLayer = network->AddMultiplicationLayer(layerName.c_str());
1582  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1583 
1584  inputLayer0->GetOutputSlot(0).Connect(multiplicationLayer->GetInputSlot(0));
1585  inputLayer1->GetOutputSlot(0).Connect(multiplicationLayer->GetInputSlot(1));
1586  multiplicationLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1587 
// All slots share the same tensor info: Multiplication is element-wise.
1588  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
1589  inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
1590  multiplicationLayer->GetOutputSlot(0).SetTensorInfo(info);
1591 
1592  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1593  CHECK(deserializedNetwork);
1594 
1595  LayerVerifierBase verifier(layerName, {info, info}, {info});
1596  deserializedNetwork->ExecuteStrategy(verifier);
1597 }
1598 
// Round-trips a Prelu layer (data input + alpha input) through the
// serializer; input/alpha/output tensor shapes differ deliberately so the
// verifier can distinguish the two input slots.
// NOTE(review): original line 1607 (numbering jump 1606 -> 1608) is elided
// in this listing — presumably the INetwork creation; verify in the repo.
1599 TEST_CASE("SerializePrelu")
1600 {
1601  const std::string layerName("prelu");
1602 
1603  armnn::TensorInfo inputTensorInfo ({ 4, 1, 2 }, armnn::DataType::Float32);
1604  armnn::TensorInfo alphaTensorInfo ({ 5, 4, 3, 1 }, armnn::DataType::Float32);
1605  armnn::TensorInfo outputTensorInfo({ 5, 4, 3, 2 }, armnn::DataType::Float32);
1606 
1608  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1609  armnn::IConnectableLayer* const alphaLayer = network->AddInputLayer(1);
1610  armnn::IConnectableLayer* const preluLayer = network->AddPreluLayer(layerName.c_str());
1611  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1612 
// Slot 0 carries the data tensor, slot 1 the per-channel alpha tensor.
1613  inputLayer->GetOutputSlot(0).Connect(preluLayer->GetInputSlot(0));
1614  alphaLayer->GetOutputSlot(0).Connect(preluLayer->GetInputSlot(1));
1615  preluLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1616 
1617  inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
1618  alphaLayer->GetOutputSlot(0).SetTensorInfo(alphaTensorInfo);
1619  preluLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1620 
1621  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1622  CHECK(deserializedNetwork);
1623 
1624  LayerVerifierBase verifier(layerName, {inputTensorInfo, alphaTensorInfo}, {outputTensorInfo});
1625  deserializedNetwork->ExecuteStrategy(verifier);
1626 }
1627 
// Round-trips a Normalization layer and verifies its descriptor
// (NormSize/Alpha/Beta/K) survives serialization.
// NOTE(review): this listing elides original lines 1633-1634, 1640 and 1654
// (see the numbering jumps below) — presumably the NormalizationDescriptor
// declaration, INetwork creation and verifier declaration; confirm against
// the repository source.
1628 TEST_CASE("SerializeNormalization")
1629 {
1630  const std::string layerName("normalization");
1631  const armnn::TensorInfo info({2, 1, 2, 2}, armnn::DataType::Float32);
1632 
1635  desc.m_NormSize = 3;
1636  desc.m_Alpha = 1;
1637  desc.m_Beta = 1;
1638  desc.m_K = 1;
1639 
1641  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1642  armnn::IConnectableLayer* const normalizationLayer = network->AddNormalizationLayer(desc, layerName.c_str());
1643  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1644 
1645  inputLayer->GetOutputSlot(0).Connect(normalizationLayer->GetInputSlot(0));
1646  normalizationLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1647 
1648  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
1649  normalizationLayer->GetOutputSlot(0).SetTensorInfo(info);
1650 
1651  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1652  CHECK(deserializedNetwork);
1653 
1655  deserializedNetwork->ExecuteStrategy(verifier);
1656 }
1657 
// Round-trips a Pad layer; the pad list {{0,0},{1,0},{1,1},{1,2}} grows the
// input {1,2,3,4} to the output {1,3,5,7}, matching the declared infos.
// NOTE(review): this listing elides original lines 1666 and 1680 (numbering
// jumps below) — presumably the INetwork creation and the opening of the
// verifier declaration; confirm against the repository source.
1658 TEST_CASE("SerializePad")
1659 {
1660  const std::string layerName("pad");
1661  const armnn::TensorInfo inputTensorInfo = armnn::TensorInfo({1, 2, 3, 4}, armnn::DataType::Float32);
1662  const armnn::TensorInfo outputTensorInfo = armnn::TensorInfo({1, 3, 5, 7}, armnn::DataType::Float32);
1663 
1664  armnn::PadDescriptor desc({{0, 0}, {1, 0}, {1, 1}, {1, 2}});
1665 
1667  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1668  armnn::IConnectableLayer* const padLayer = network->AddPadLayer(desc, layerName.c_str());
1669  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1670 
1671  inputLayer->GetOutputSlot(0).Connect(padLayer->GetInputSlot(0));
1672  padLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1673 
1674  inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
1675  padLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1676 
1677  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1678  CHECK(deserializedNetwork);
1679 
1681  {inputTensorInfo},
1682  {outputTensorInfo},
1683  desc);
1684  deserializedNetwork->ExecuteStrategy(verifier);
1685 }
1686 
// Same structure as SerializePad but named "padReflect" — presumably the
// elided line 1694 sets desc.m_PaddingMode to Reflect (the listing drops it;
// confirm against the repository source). Lines 1696 and 1710 are also
// elided (INetwork creation / verifier declaration).
1687 TEST_CASE("SerializePadReflect")
1688 {
1689  const std::string layerName("padReflect");
1690  const armnn::TensorInfo inputTensorInfo = armnn::TensorInfo({1, 2, 3, 4}, armnn::DataType::Float32);
1691  const armnn::TensorInfo outputTensorInfo = armnn::TensorInfo({1, 3, 5, 7}, armnn::DataType::Float32);
1692 
1693  armnn::PadDescriptor desc({{0, 0}, {1, 0}, {1, 1}, {1, 2}});
1695 
1697  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1698  armnn::IConnectableLayer* const padLayer = network->AddPadLayer(desc, layerName.c_str());
1699  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1700 
1701  inputLayer->GetOutputSlot(0).Connect(padLayer->GetInputSlot(0));
1702  padLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1703 
1704  inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
1705  padLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1706 
1707  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1708  CHECK(deserializedNetwork);
1709 
1711  {inputTensorInfo},
1712  {outputTensorInfo},
1713  desc);
1714  deserializedNetwork->ExecuteStrategy(verifier);
1715 }
1716 
// Backward-compatibility test: deserializes a pre-recorded FlatBuffer binary
// (input -> pad -> output) captured before PadDescriptor gained m_PadValue,
// and checks the old dump still loads and matches the expected descriptor.
1717 TEST_CASE("EnsurePadBackwardCompatibility")
1718 {
1719  // The PadDescriptor is being extended with a float PadValue (so a value other than 0
1720  // can be used to pad the tensor.
1721  //
1722  // This test contains a binary representation of a simple input->pad->output network
1723  // prior to this change to test that the descriptor has been updated in a backward
1724  // compatible way with respect to Deserialization of older binary dumps
1725  const std::vector<uint8_t> padModel =
1726  {
1727  0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00,
1728  0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
1729  0x54, 0x01, 0x00, 0x00, 0x6C, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
1730  0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0xD0, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B,
1731  0x04, 0x00, 0x00, 0x00, 0x96, 0xFF, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x9E, 0xFF, 0xFF, 0xFF, 0x04, 0x00,
1732  0x00, 0x00, 0x72, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
1733  0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00,
1734  0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x2C, 0xFF, 0xFF, 0xFF, 0x01, 0x00, 0x00, 0x00,
1735  0x00, 0x00, 0x00, 0x00, 0x24, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x16, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00,
1736  0x0E, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x4C, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00,
1737  0x00, 0x00, 0x06, 0x00, 0x08, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x08, 0x00,
1738  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1739  0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00,
1740  0x0E, 0x00, 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x14, 0x00, 0x0E, 0x00, 0x00, 0x00,
1741  0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00,
1742  0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x70, 0x61, 0x64, 0x00, 0x01, 0x00, 0x00, 0x00, 0x48, 0x00, 0x00, 0x00,
1743  0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00,
1744  0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x52, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00,
1745  0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x05, 0x00,
1746  0x00, 0x00, 0x07, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00,
1747  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00,
1748  0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x04, 0x00, 0x00, 0x00, 0xF6, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00,
1749  0x00, 0x00, 0x06, 0x00, 0x0A, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00,
1750  0x0E, 0x00, 0x14, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x0E, 0x00, 0x00, 0x00,
1751  0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00,
1752  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00,
1753  0x08, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00,
1754  0x0A, 0x00, 0x10, 0x00, 0x08, 0x00, 0x07, 0x00, 0x0C, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
1755  0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00,
1756  0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00
1757  };
1758 
// Deserialize the raw bytes as if they were a freshly serialized network.
1759  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(std::string(padModel.begin(), padModel.end()));
1760  CHECK(deserializedNetwork);
1761 
1762  const armnn::TensorInfo inputInfo = armnn::TensorInfo({ 1, 2, 3, 4 }, armnn::DataType::Float32);
1763  const armnn::TensorInfo outputInfo = armnn::TensorInfo({ 1, 3, 5, 7 }, armnn::DataType::Float32);
1764 
1765  armnn::PadDescriptor descriptor({{ 0, 0 }, { 1, 0 }, { 1, 1 }, { 1, 2 }});
1766 
1767  LayerVerifierBaseWithDescriptor<armnn::PadDescriptor> verifier("pad", { inputInfo }, { outputInfo }, descriptor);
1768  deserializedNetwork->ExecuteStrategy(verifier);
1769 }
1770 
// Round-trips a Permute layer whose mapping {3,2,1,0} reverses the dimension
// order, so input {4,3,2,1} becomes output {1,2,3,4}.
// NOTE(review): original lines 1779 and 1793 (numbering jumps below) are
// elided in this listing — presumably the INetwork creation and the opening
// of the verifier declaration; confirm against the repository source.
1771 TEST_CASE("SerializePermute")
1772 {
1773  const std::string layerName("permute");
1774  const armnn::TensorInfo inputTensorInfo({4, 3, 2, 1}, armnn::DataType::Float32);
1775  const armnn::TensorInfo outputTensorInfo({1, 2, 3, 4}, armnn::DataType::Float32);
1776 
1777  armnn::PermuteDescriptor descriptor(armnn::PermutationVector({3, 2, 1, 0}));
1778 
1780  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1781  armnn::IConnectableLayer* const permuteLayer = network->AddPermuteLayer(descriptor, layerName.c_str());
1782  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1783 
1784  inputLayer->GetOutputSlot(0).Connect(permuteLayer->GetInputSlot(0));
1785  permuteLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1786 
1787  inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
1788  permuteLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1789 
1790  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1791  CHECK(deserializedNetwork);
1792 
1794  layerName, {inputTensorInfo}, {outputTensorInfo}, descriptor);
1795  deserializedNetwork->ExecuteStrategy(verifier);
1796 }
1797 
// Round-trips a Pooling2d layer: 2x2 average pooling, stride 2, no padding,
// reducing {1,2,2,1} to {1,1,1,1}. Verifies all descriptor fields survive.
// NOTE(review): this listing elides original lines 1804-1805, 1818 and 1832
// (numbering jumps below) — presumably the Pooling2dDescriptor declaration,
// INetwork creation and verifier declaration; confirm against the repo.
1798 TEST_CASE("SerializePooling2d")
1799 {
1800  const std::string layerName("pooling2d");
1801  const armnn::TensorInfo inputInfo({1, 2, 2, 1}, armnn::DataType::Float32);
1802  const armnn::TensorInfo outputInfo({1, 1, 1, 1}, armnn::DataType::Float32);
1803 
1806  desc.m_PadTop = 0;
1807  desc.m_PadBottom = 0;
1808  desc.m_PadLeft = 0;
1809  desc.m_PadRight = 0;
1810  desc.m_PoolType = armnn::PoolingAlgorithm::Average;
1811  desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
1812  desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
1813  desc.m_PoolHeight = 2;
1814  desc.m_PoolWidth = 2;
1815  desc.m_StrideX = 2;
1816  desc.m_StrideY = 2;
1817 
1819  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1820  armnn::IConnectableLayer* const pooling2dLayer = network->AddPooling2dLayer(desc, layerName.c_str());
1821  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1822 
1823  inputLayer->GetOutputSlot(0).Connect(pooling2dLayer->GetInputSlot(0));
1824  pooling2dLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1825 
1826  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
1827  pooling2dLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1828 
1829  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1830  CHECK(deserializedNetwork);
1831 
1833  layerName, {inputInfo}, {outputInfo}, desc);
1834  deserializedNetwork->ExecuteStrategy(verifier);
1835 }
1836 
// Round-trips a single-input Quantize layer and checks name/connection
// integrity with LayerVerifierBase.
// NOTE(review): original line 1842 (numbering jump 1841 -> 1843) is elided
// in this listing — presumably the INetwork creation; verify in the repo.
1837 TEST_CASE("SerializeQuantize")
1838 {
1839  const std::string layerName("quantize");
1840  const armnn::TensorInfo info({ 1, 2, 2, 3 }, armnn::DataType::Float32);
1841 
1843  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1844  armnn::IConnectableLayer* const quantizeLayer = network->AddQuantizeLayer(layerName.c_str());
1845  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1846 
1847  inputLayer->GetOutputSlot(0).Connect(quantizeLayer->GetInputSlot(0));
1848  quantizeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1849 
1850  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
1851  quantizeLayer->GetOutputSlot(0).SetTensorInfo(info);
1852 
1853  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1854  CHECK(deserializedNetwork);
1855 
1856  LayerVerifierBase verifier(layerName, {info}, {info});
1857  deserializedNetwork->ExecuteStrategy(verifier);
1858 }
1859 
// Round-trips a Rank layer: Float32 input {1,9} produces a scalar-like
// Signed32 output {1} (the tensor's rank).
// NOTE(review): original line 1866 (numbering jump 1865 -> 1867) is elided
// in this listing — presumably the INetwork creation; verify in the repo.
1860 TEST_CASE("SerializeRank")
1861 {
1862  const std::string layerName("rank");
1863  const armnn::TensorInfo inputInfo({1, 9}, armnn::DataType::Float32);
1864  const armnn::TensorInfo outputInfo({1}, armnn::DataType::Signed32);
1865 
1867  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1868  armnn::IConnectableLayer* const rankLayer = network->AddRankLayer(layerName.c_str());
1869  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1870 
1871  inputLayer->GetOutputSlot(0).Connect(rankLayer->GetInputSlot(0));
1872  rankLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1873 
1874  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
1875  rankLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1876 
1877  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1878  CHECK(deserializedNetwork);
1879 
1880  LayerVerifierBase verifier(layerName, {inputInfo}, {outputInfo});
1881  deserializedNetwork->ExecuteStrategy(verifier);
1882 }
1883 
// Round-trips a Reduce layer configured for Sum over axis 2, collapsing
// {1,1,3,2} to {1,1,1,2}; the descriptor is checked after deserialization.
// NOTE(review): original line 1894 (numbering jump 1893 -> 1895) is elided
// in this listing — presumably the INetwork creation; verify in the repo.
1884 TEST_CASE("SerializeReduceSum")
1885 {
1886  const std::string layerName("Reduce_Sum");
1887  const armnn::TensorInfo inputInfo({1, 1, 3, 2}, armnn::DataType::Float32);
1888  const armnn::TensorInfo outputInfo({1, 1, 1, 2}, armnn::DataType::Float32);
1889 
1890  armnn::ReduceDescriptor descriptor;
1891  descriptor.m_vAxis = { 2 };
1892  descriptor.m_ReduceOperation = armnn::ReduceOperation::Sum;
1893 
1895  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1896  armnn::IConnectableLayer* const reduceSumLayer = network->AddReduceLayer(descriptor, layerName.c_str());
1897  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1898 
1899  inputLayer->GetOutputSlot(0).Connect(reduceSumLayer->GetInputSlot(0));
1900  reduceSumLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1901 
1902  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
1903  reduceSumLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1904 
1905  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1906  CHECK(deserializedNetwork);
1907 
1908  LayerVerifierBaseWithDescriptor<armnn::ReduceDescriptor> verifier(layerName, {inputInfo}, {outputInfo}, descriptor);
1909  deserializedNetwork->ExecuteStrategy(verifier);
1910 }
1911 
// Round-trips a Reshape layer mapping {1,9} to {3,3}; the target shape lives
// in the ReshapeDescriptor and is verified after deserialization.
// NOTE(review): original lines 1920 and 1934 (numbering jumps below) are
// elided in this listing — presumably the INetwork creation and the opening
// of the verifier declaration; confirm against the repository source.
1912 TEST_CASE("SerializeReshape")
1913 {
1914  const std::string layerName("reshape");
1915  const armnn::TensorInfo inputInfo({1, 9}, armnn::DataType::Float32);
1916  const armnn::TensorInfo outputInfo({3, 3}, armnn::DataType::Float32);
1917 
1918  armnn::ReshapeDescriptor descriptor({3, 3});
1919 
1921  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1922  armnn::IConnectableLayer* const reshapeLayer = network->AddReshapeLayer(descriptor, layerName.c_str());
1923  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1924 
1925  inputLayer->GetOutputSlot(0).Connect(reshapeLayer->GetInputSlot(0));
1926  reshapeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1927 
1928  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
1929  reshapeLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1930 
1931  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1932  CHECK(deserializedNetwork);
1933 
1935  layerName, {inputInfo}, {outputInfo}, descriptor);
1936  deserializedNetwork->ExecuteStrategy(verifier);
1937 }
1938 
// Round-trips a Resize layer (NearestNeighbor, 4x2 target, align-corners and
// half-pixel-centers enabled) and verifies the full ResizeDescriptor.
// NOTE(review): this listing elides original lines 1945 and 1952 (numbering
// jumps below) — presumably the ResizeDescriptor declaration and INetwork
// creation; confirm against the repository source.
1939 TEST_CASE("SerializeResize")
1940 {
1941  const std::string layerName("resize");
1942  const armnn::TensorInfo inputInfo = armnn::TensorInfo({1, 3, 5, 5}, armnn::DataType::Float32);
1943  const armnn::TensorInfo outputInfo = armnn::TensorInfo({1, 3, 2, 4}, armnn::DataType::Float32);
1944 
1946  desc.m_TargetWidth = 4;
1947  desc.m_TargetHeight = 2;
1948  desc.m_Method = armnn::ResizeMethod::NearestNeighbor;
1949  desc.m_AlignCorners = true;
1950  desc.m_HalfPixelCenters = true;
1951 
1953  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1954  armnn::IConnectableLayer* const resizeLayer = network->AddResizeLayer(desc, layerName.c_str());
1955  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1956 
1957  inputLayer->GetOutputSlot(0).Connect(resizeLayer->GetInputSlot(0));
1958  resizeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1959 
1960  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
1961  resizeLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1962 
1963  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1964  CHECK(deserializedNetwork);
1965 
1966  LayerVerifierBaseWithDescriptor<armnn::ResizeDescriptor> verifier(layerName, {inputInfo}, {outputInfo}, desc);
1967  deserializedNetwork->ExecuteStrategy(verifier);
1968 }
1969 
// Strategy-visitor used by the ResizeBilinear tests: accepts Input/Output
// layers, asserts that the (deprecated) ResizeBilinear layer was translated
// to a Resize layer with Bilinear method and matching descriptor fields, and
// throws on any other layer type.
// NOTE(review): this listing elides original lines 1977 (the base-class
// initializer of the constructor) and 1991 (presumably the
// `case armnn::LayerType::Resize:` label); confirm against the repo source.
1970 class ResizeBilinearLayerVerifier : public LayerVerifierBaseWithDescriptor<armnn::ResizeDescriptor>
1971 {
1972 public:
1973  ResizeBilinearLayerVerifier(const std::string& layerName,
1974  const std::vector<armnn::TensorInfo>& inputInfos,
1975  const std::vector<armnn::TensorInfo>& outputInfos,
1976  const armnn::ResizeDescriptor& descriptor)
1978  layerName, inputInfos, outputInfos, descriptor) {}
1979 
1980  void ExecuteStrategy(const armnn::IConnectableLayer* layer,
1981  const armnn::BaseDescriptor& descriptor,
1982  const std::vector<armnn::ConstTensor>& constants,
1983  const char* name,
1984  const armnn::LayerBindingId id = 0) override
1985  {
1986  armnn::IgnoreUnused(descriptor, constants, id);
1987  switch (layer->GetType())
1988  {
1989  case armnn::LayerType::Input: break;
1990  case armnn::LayerType::Output: break;
1992  {
1993  VerifyNameAndConnections(layer, name);
1994  const armnn::ResizeDescriptor& layerDescriptor =
1995  static_cast<const armnn::ResizeDescriptor&>(descriptor);
// The deserialized layer must use Bilinear and carry over every field of
// the expected descriptor (m_Descriptor is held by the base class).
1996  CHECK(layerDescriptor.m_Method == armnn::ResizeMethod::Bilinear);
1997  CHECK(layerDescriptor.m_TargetWidth == m_Descriptor.m_TargetWidth);
1998  CHECK(layerDescriptor.m_TargetHeight == m_Descriptor.m_TargetHeight);
1999  CHECK(layerDescriptor.m_DataLayout == m_Descriptor.m_DataLayout);
2000  CHECK(layerDescriptor.m_AlignCorners == m_Descriptor.m_AlignCorners);
2001  CHECK(layerDescriptor.m_HalfPixelCenters == m_Descriptor.m_HalfPixelCenters);
2002  break;
2003  }
2004  default:
2005  {
2006  throw armnn::Exception("Unexpected layer type in test model. ResizeBiliniar "
2007  "should have translated to Resize");
2008  }
2009  }
2010  }
2011 };
2012 
// Round-trips a Resize layer and checks it with ResizeBilinearLayerVerifier,
// which requires the deserialized method to be Bilinear.
// NOTE(review): this listing elides original lines 2019-2020 and 2026
// (numbering jumps below) — presumably the ResizeDescriptor declaration
// (with m_Method = Bilinear) and the INetwork creation; confirm in the repo.
2013 TEST_CASE("SerializeResizeBilinear")
2014 {
2015  const std::string layerName("resizeBilinear");
2016  const armnn::TensorInfo inputInfo = armnn::TensorInfo({1, 3, 5, 5}, armnn::DataType::Float32);
2017  const armnn::TensorInfo outputInfo = armnn::TensorInfo({1, 3, 2, 4}, armnn::DataType::Float32);
2018 
2021  desc.m_TargetWidth = 4u;
2022  desc.m_TargetHeight = 2u;
2023  desc.m_AlignCorners = true;
2024  desc.m_HalfPixelCenters = true;
2025 
2027  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2028  armnn::IConnectableLayer* const resizeLayer = network->AddResizeLayer(desc, layerName.c_str());
2029  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2030 
2031  inputLayer->GetOutputSlot(0).Connect(resizeLayer->GetInputSlot(0));
2032  resizeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2033 
2034  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2035  resizeLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2036 
2037  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2038  CHECK(deserializedNetwork);
2039 
2040  ResizeBilinearLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, desc);
2041  deserializedNetwork->ExecuteStrategy(verifier);
2042 }
2043 
// Backward-compatibility test: deserializes a pre-recorded FlatBuffer binary
// that still contains the removed ResizeBilinearLayer and checks it loads as
// an equivalent Resize layer (verified via ResizeBilinearLayerVerifier).
2044 TEST_CASE("EnsureResizeBilinearBackwardCompatibility")
2045 {
2046  // The hex data below is a flat buffer containing a simple network with an input,
2047  // a ResizeBilinearLayer (now deprecated and removed) and an output
2048  //
2049  // This test verifies that we can still deserialize this old-style model by replacing
2050  // the ResizeBilinearLayer with an equivalent ResizeLayer
2051  const std::vector<uint8_t> resizeBilinearModel =
2052  {
2053  0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00,
2054  0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
2055  0x50, 0x01, 0x00, 0x00, 0x74, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
2056  0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0xD4, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B,
2057  0x04, 0x00, 0x00, 0x00, 0xC2, 0xFE, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x08, 0x00,
2058  0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x8A, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00,
2059  0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00,
2060  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
2061  0x38, 0xFF, 0xFF, 0xFF, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x30, 0xFF, 0xFF, 0xFF, 0x00, 0x00,
2062  0x00, 0x1A, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0E, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00,
2063  0x34, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x12, 0x00, 0x08, 0x00, 0x0C, 0x00,
2064  0x07, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
2065  0x00, 0x00, 0x0E, 0x00, 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x14, 0x00, 0x0E, 0x00,
2066  0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x19, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00,
2067  0x20, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x72, 0x65, 0x73, 0x69, 0x7A, 0x65, 0x42, 0x69, 0x6C, 0x69,
2068  0x6E, 0x65, 0x61, 0x72, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x48, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
2069  0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00,
2070  0x00, 0x00, 0x52, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
2071  0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x04, 0x00,
2072  0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
2073  0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
2074  0x00, 0x09, 0x04, 0x00, 0x00, 0x00, 0xF6, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00,
2075  0x0A, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x14, 0x00,
2076  0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,
2077  0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
2078  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0A, 0x00,
2079  0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00,
2080  0x08, 0x00, 0x07, 0x00, 0x0C, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00,
2081  0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x05, 0x00,
2082  0x00, 0x00, 0x05, 0x00, 0x00, 0x00
2083  };
2084 
// Deserialize the raw bytes as if they were a freshly serialized network.
2085  armnn::INetworkPtr deserializedNetwork =
2086  DeserializeNetwork(std::string(resizeBilinearModel.begin(), resizeBilinearModel.end()));
2087  CHECK(deserializedNetwork);
2088 
2089  const armnn::TensorInfo inputInfo = armnn::TensorInfo({1, 3, 5, 5}, armnn::DataType::Float32);
2090  const armnn::TensorInfo outputInfo = armnn::TensorInfo({1, 3, 2, 4}, armnn::DataType::Float32);
2091 
2092  armnn::ResizeDescriptor descriptor;
2093  descriptor.m_TargetWidth = 4u;
2094  descriptor.m_TargetHeight = 2u;
2095 
2096  ResizeBilinearLayerVerifier verifier("resizeBilinear", { inputInfo }, { outputInfo }, descriptor);
2097  deserializedNetwork->ExecuteStrategy(verifier);
2098 }
2099 
// Round-trips a Shape layer: input {1,3,3,1} yields a rank-length Signed32
// output {4} holding the input's dimensions.
// NOTE(review): original line 2106 (numbering jump 2105 -> 2107) is elided
// in this listing — presumably the INetwork creation; verify in the repo.
2100 TEST_CASE("SerializeShape")
2101 {
2102  const std::string layerName("shape");
2103  const armnn::TensorInfo inputInfo({1, 3, 3, 1}, armnn::DataType::Signed32);
2104  const armnn::TensorInfo outputInfo({ 4 }, armnn::DataType::Signed32);
2105 
2107  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2108  armnn::IConnectableLayer* const shapeLayer = network->AddShapeLayer(layerName.c_str());
2109  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2110 
2111  inputLayer->GetOutputSlot(0).Connect(shapeLayer->GetInputSlot(0));
2112  shapeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2113 
2114  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2115  shapeLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2116 
2117  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2118  CHECK(deserializedNetwork);
2119 
2120  LayerVerifierBase verifier(layerName, {inputInfo}, {outputInfo});
2121 
2122  deserializedNetwork->ExecuteStrategy(verifier);
2123 }
2124 
2125 TEST_CASE("SerializeSlice")
2126 {
2127  const std::string layerName{"slice"};
2128 
2129  const armnn::TensorInfo inputInfo = armnn::TensorInfo({3, 2, 3, 1}, armnn::DataType::Float32);
2130  const armnn::TensorInfo outputInfo = armnn::TensorInfo({2, 2, 2, 1}, armnn::DataType::Float32);
2131 
2132  armnn::SliceDescriptor descriptor({ 0, 0, 1, 0}, {2, 2, 2, 1});
2133 
2135 
2136  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2137  armnn::IConnectableLayer* const sliceLayer = network->AddSliceLayer(descriptor, layerName.c_str());
2138  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2139 
2140  inputLayer->GetOutputSlot(0).Connect(sliceLayer->GetInputSlot(0));
2141  sliceLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2142 
2143  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2144  sliceLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2145 
2146  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2147  CHECK(deserializedNetwork);
2148 
2149  LayerVerifierBaseWithDescriptor<armnn::SliceDescriptor> verifier(layerName, {inputInfo}, {outputInfo}, descriptor);
2150  deserializedNetwork->ExecuteStrategy(verifier);
2151 }
2152 
2153 TEST_CASE("SerializeSoftmax")
2154 {
2155  const std::string layerName("softmax");
2157 
2158  armnn::SoftmaxDescriptor descriptor;
2159  descriptor.m_Beta = 1.0f;
2160 
2162  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2163  armnn::IConnectableLayer* const softmaxLayer = network->AddSoftmaxLayer(descriptor, layerName.c_str());
2164  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2165 
2166  inputLayer->GetOutputSlot(0).Connect(softmaxLayer->GetInputSlot(0));
2167  softmaxLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2168 
2169  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
2170  softmaxLayer->GetOutputSlot(0).SetTensorInfo(info);
2171 
2172  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2173  CHECK(deserializedNetwork);
2174 
2175  LayerVerifierBaseWithDescriptor<armnn::SoftmaxDescriptor> verifier(layerName, {info}, {info}, descriptor);
2176  deserializedNetwork->ExecuteStrategy(verifier);
2177 }
2178 
2179 TEST_CASE("SerializeSpaceToBatchNd")
2180 {
2181  const std::string layerName("spaceToBatchNd");
2182  const armnn::TensorInfo inputInfo({2, 1, 2, 4}, armnn::DataType::Float32);
2183  const armnn::TensorInfo outputInfo({8, 1, 1, 3}, armnn::DataType::Float32);
2184 
2187  desc.m_BlockShape = {2, 2};
2188  desc.m_PadList = {{0, 0}, {2, 0}};
2189 
2191  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2192  armnn::IConnectableLayer* const spaceToBatchNdLayer = network->AddSpaceToBatchNdLayer(desc, layerName.c_str());
2193  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2194 
2195  inputLayer->GetOutputSlot(0).Connect(spaceToBatchNdLayer->GetInputSlot(0));
2196  spaceToBatchNdLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2197 
2198  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2199  spaceToBatchNdLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2200 
2201  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2202  CHECK(deserializedNetwork);
2203 
2205  layerName, {inputInfo}, {outputInfo}, desc);
2206  deserializedNetwork->ExecuteStrategy(verifier);
2207 }
2208 
2209 TEST_CASE("SerializeSpaceToDepth")
2210 {
2211  const std::string layerName("spaceToDepth");
2212 
2213  const armnn::TensorInfo inputInfo ({ 1, 16, 8, 3 }, armnn::DataType::Float32);
2214  const armnn::TensorInfo outputInfo({ 1, 8, 4, 12 }, armnn::DataType::Float32);
2215 
2217  desc.m_BlockSize = 2;
2218  desc.m_DataLayout = armnn::DataLayout::NHWC;
2219 
2221  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2222  armnn::IConnectableLayer* const spaceToDepthLayer = network->AddSpaceToDepthLayer(desc, layerName.c_str());
2223  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2224 
2225  inputLayer->GetOutputSlot(0).Connect(spaceToDepthLayer->GetInputSlot(0));
2226  spaceToDepthLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2227 
2228  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2229  spaceToDepthLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2230 
2231  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2232  CHECK(deserializedNetwork);
2233 
2235  layerName, {inputInfo}, {outputInfo}, desc);
2236  deserializedNetwork->ExecuteStrategy(verifier);
2237 }
2238 
2239 TEST_CASE("SerializeSplitter")
2240 {
2241  const unsigned int numViews = 3;
2242  const unsigned int numDimensions = 4;
2243  const unsigned int inputShape[] = {1, 18, 4, 4};
2244  const unsigned int outputShape[] = {1, 6, 4, 4};
2245 
2246  // This is modelled on how the caffe parser sets up a splitter layer to partition an input along dimension one.
2247  unsigned int splitterDimSizes[4] = {static_cast<unsigned int>(inputShape[0]),
2248  static_cast<unsigned int>(inputShape[1]),
2249  static_cast<unsigned int>(inputShape[2]),
2250  static_cast<unsigned int>(inputShape[3])};
2251  splitterDimSizes[1] /= numViews;
2252  armnn::ViewsDescriptor desc(numViews, numDimensions);
2253 
2254  for (unsigned int g = 0; g < numViews; ++g)
2255  {
2256  desc.SetViewOriginCoord(g, 1, splitterDimSizes[1] * g);
2257 
2258  for (unsigned int dimIdx=0; dimIdx < 4; dimIdx++)
2259  {
2260  desc.SetViewSize(g, dimIdx, splitterDimSizes[dimIdx]);
2261  }
2262  }
2263 
2264  const std::string layerName("splitter");
2265  const armnn::TensorInfo inputInfo(numDimensions, inputShape, armnn::DataType::Float32);
2266  const armnn::TensorInfo outputInfo(numDimensions, outputShape, armnn::DataType::Float32);
2267 
2269  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2270  armnn::IConnectableLayer* const splitterLayer = network->AddSplitterLayer(desc, layerName.c_str());
2271  armnn::IConnectableLayer* const outputLayer0 = network->AddOutputLayer(0);
2272  armnn::IConnectableLayer* const outputLayer1 = network->AddOutputLayer(1);
2273  armnn::IConnectableLayer* const outputLayer2 = network->AddOutputLayer(2);
2274 
2275  inputLayer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0));
2276  splitterLayer->GetOutputSlot(0).Connect(outputLayer0->GetInputSlot(0));
2277  splitterLayer->GetOutputSlot(1).Connect(outputLayer1->GetInputSlot(0));
2278  splitterLayer->GetOutputSlot(2).Connect(outputLayer2->GetInputSlot(0));
2279 
2280  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2281  splitterLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2282  splitterLayer->GetOutputSlot(1).SetTensorInfo(outputInfo);
2283  splitterLayer->GetOutputSlot(2).SetTensorInfo(outputInfo);
2284 
2285  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2286  CHECK(deserializedNetwork);
2287 
2289  layerName, {inputInfo}, {outputInfo, outputInfo, outputInfo}, desc);
2290  deserializedNetwork->ExecuteStrategy(verifier);
2291 }
2292 
2293 TEST_CASE("SerializeStack")
2294 {
2295  const std::string layerName("stack");
2296 
2297  armnn::TensorInfo inputTensorInfo ({4, 3, 5}, armnn::DataType::Float32);
2298  armnn::TensorInfo outputTensorInfo({4, 3, 2, 5}, armnn::DataType::Float32);
2299 
2300  armnn::StackDescriptor descriptor(2, 2, {4, 3, 5});
2301 
2303  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(0);
2304  armnn::IConnectableLayer* const inputLayer2 = network->AddInputLayer(1);
2305  armnn::IConnectableLayer* const stackLayer = network->AddStackLayer(descriptor, layerName.c_str());
2306  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2307 
2308  inputLayer1->GetOutputSlot(0).Connect(stackLayer->GetInputSlot(0));
2309  inputLayer2->GetOutputSlot(0).Connect(stackLayer->GetInputSlot(1));
2310  stackLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2311 
2312  inputLayer1->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
2313  inputLayer2->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
2314  stackLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2315 
2316  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2317  CHECK(deserializedNetwork);
2318 
2320  layerName, {inputTensorInfo, inputTensorInfo}, {outputTensorInfo}, descriptor);
2321  deserializedNetwork->ExecuteStrategy(verifier);
2322 }
2323 
2324 TEST_CASE("SerializeStandIn")
2325 {
2326  const std::string layerName("standIn");
2327 
2328  armnn::TensorInfo tensorInfo({ 1u }, armnn::DataType::Float32);
2329  armnn::StandInDescriptor descriptor(2u, 2u);
2330 
2332  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
2333  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
2334  armnn::IConnectableLayer* const standInLayer = network->AddStandInLayer(descriptor, layerName.c_str());
2335  armnn::IConnectableLayer* const outputLayer0 = network->AddOutputLayer(0);
2336  armnn::IConnectableLayer* const outputLayer1 = network->AddOutputLayer(1);
2337 
2338  inputLayer0->GetOutputSlot(0).Connect(standInLayer->GetInputSlot(0));
2339  inputLayer0->GetOutputSlot(0).SetTensorInfo(tensorInfo);
2340 
2341  inputLayer1->GetOutputSlot(0).Connect(standInLayer->GetInputSlot(1));
2342  inputLayer1->GetOutputSlot(0).SetTensorInfo(tensorInfo);
2343 
2344  standInLayer->GetOutputSlot(0).Connect(outputLayer0->GetInputSlot(0));
2345  standInLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
2346 
2347  standInLayer->GetOutputSlot(1).Connect(outputLayer1->GetInputSlot(0));
2348  standInLayer->GetOutputSlot(1).SetTensorInfo(tensorInfo);
2349 
2350  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2351  CHECK(deserializedNetwork);
2352 
2354  layerName, { tensorInfo, tensorInfo }, { tensorInfo, tensorInfo }, descriptor);
2355  deserializedNetwork->ExecuteStrategy(verifier);
2356 }
2357 
2358 TEST_CASE("SerializeStridedSlice")
2359 {
2360  const std::string layerName("stridedSlice");
2361  const armnn::TensorInfo inputInfo = armnn::TensorInfo({3, 2, 3, 1}, armnn::DataType::Float32);
2362  const armnn::TensorInfo outputInfo = armnn::TensorInfo({3, 1}, armnn::DataType::Float32);
2363 
2364  armnn::StridedSliceDescriptor desc({0, 0, 1, 0}, {1, 1, 1, 1}, {1, 1, 1, 1});
2365  desc.m_EndMask = (1 << 4) - 1;
2366  desc.m_ShrinkAxisMask = (1 << 1) | (1 << 2);
2367  desc.m_DataLayout = armnn::DataLayout::NCHW;
2368 
2370  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2371  armnn::IConnectableLayer* const stridedSliceLayer = network->AddStridedSliceLayer(desc, layerName.c_str());
2372  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2373 
2374  inputLayer->GetOutputSlot(0).Connect(stridedSliceLayer->GetInputSlot(0));
2375  stridedSliceLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2376 
2377  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2378  stridedSliceLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2379 
2380  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2381  CHECK(deserializedNetwork);
2382 
2384  layerName, {inputInfo}, {outputInfo}, desc);
2385  deserializedNetwork->ExecuteStrategy(verifier);
2386 }
2387 
2388 TEST_CASE("SerializeSubtraction")
2389 {
2390  const std::string layerName("subtraction");
2392 
2394  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
2395  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
2396  armnn::IConnectableLayer* const subtractionLayer = network->AddSubtractionLayer(layerName.c_str());
2397  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2398 
2399  inputLayer0->GetOutputSlot(0).Connect(subtractionLayer->GetInputSlot(0));
2400  inputLayer1->GetOutputSlot(0).Connect(subtractionLayer->GetInputSlot(1));
2401  subtractionLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2402 
2403  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
2404  inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
2405  subtractionLayer->GetOutputSlot(0).SetTensorInfo(info);
2406 
2407  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2408  CHECK(deserializedNetwork);
2409 
2410  LayerVerifierBase verifier(layerName, {info, info}, {info});
2411  deserializedNetwork->ExecuteStrategy(verifier);
2412 }
2413 
2414 TEST_CASE("SerializeSwitch")
2415 {
2416  class SwitchLayerVerifier : public LayerVerifierBase
2417  {
2418  public:
2419  SwitchLayerVerifier(const std::string& layerName,
2420  const std::vector<armnn::TensorInfo>& inputInfos,
2421  const std::vector<armnn::TensorInfo>& outputInfos)
2422  : LayerVerifierBase(layerName, inputInfos, outputInfos) {}
2423 
2424  void ExecuteStrategy(const armnn::IConnectableLayer* layer,
2425  const armnn::BaseDescriptor& descriptor,
2426  const std::vector<armnn::ConstTensor>& constants,
2427  const char* name,
2428  const armnn::LayerBindingId id = 0) override
2429  {
2430  armnn::IgnoreUnused(descriptor, constants, id);
2431  switch (layer->GetType())
2432  {
2433  case armnn::LayerType::Input: break;
2434  case armnn::LayerType::Output: break;
2435  case armnn::LayerType::Constant: break;
2437  {
2438  VerifyNameAndConnections(layer, name);
2439  break;
2440  }
2441  default:
2442  {
2443  throw armnn::Exception("Unexpected layer type in Switch test model");
2444  }
2445  }
2446  }
2447  };
2448 
2449  const std::string layerName("switch");
2450  const armnn::TensorInfo info({ 1, 4 }, armnn::DataType::Float32, 0.0f, 0, true);
2451 
2452  std::vector<float> constantData = GenerateRandomData<float>(info.GetNumElements());
2453  armnn::ConstTensor constTensor(info, constantData);
2454 
2456  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2457  armnn::IConnectableLayer* const constantLayer = network->AddConstantLayer(constTensor, "constant");
2458  armnn::IConnectableLayer* const switchLayer = network->AddSwitchLayer(layerName.c_str());
2459  armnn::IConnectableLayer* const trueOutputLayer = network->AddOutputLayer(0);
2460  armnn::IConnectableLayer* const falseOutputLayer = network->AddOutputLayer(1);
2461 
2462  inputLayer->GetOutputSlot(0).Connect(switchLayer->GetInputSlot(0));
2463  constantLayer->GetOutputSlot(0).Connect(switchLayer->GetInputSlot(1));
2464  switchLayer->GetOutputSlot(0).Connect(trueOutputLayer->GetInputSlot(0));
2465  switchLayer->GetOutputSlot(1).Connect(falseOutputLayer->GetInputSlot(0));
2466 
2467  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
2468  constantLayer->GetOutputSlot(0).SetTensorInfo(info);
2469  switchLayer->GetOutputSlot(0).SetTensorInfo(info);
2470  switchLayer->GetOutputSlot(1).SetTensorInfo(info);
2471 
2472  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2473  CHECK(deserializedNetwork);
2474 
2475  SwitchLayerVerifier verifier(layerName, {info, info}, {info, info});
2476  deserializedNetwork->ExecuteStrategy(verifier);
2477 }
2478 
2479 TEST_CASE("SerializeTranspose")
2480 {
2481  const std::string layerName("transpose");
2482  const armnn::TensorInfo inputTensorInfo({4, 3, 2, 1}, armnn::DataType::Float32);
2483  const armnn::TensorInfo outputTensorInfo({1, 2, 3, 4}, armnn::DataType::Float32);
2484 
2485  armnn::TransposeDescriptor descriptor(armnn::PermutationVector({3, 2, 1, 0}));
2486 
2488  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2489  armnn::IConnectableLayer* const transposeLayer = network->AddTransposeLayer(descriptor, layerName.c_str());
2490  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2491 
2492  inputLayer->GetOutputSlot(0).Connect(transposeLayer->GetInputSlot(0));
2493  transposeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2494 
2495  inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
2496  transposeLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2497 
2498  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2499  CHECK(deserializedNetwork);
2500 
2502  layerName, {inputTensorInfo}, {outputTensorInfo}, descriptor);
2503  deserializedNetwork->ExecuteStrategy(verifier);
2504 }
2505 
2506 TEST_CASE("SerializeTransposeConvolution2d")
2507 {
2508  const std::string layerName("transposeConvolution2d");
2509  const armnn::TensorInfo inputInfo ({ 1, 7, 7, 1 }, armnn::DataType::Float32);
2510  const armnn::TensorInfo outputInfo({ 1, 9, 9, 1 }, armnn::DataType::Float32);
2511 
2512  const armnn::TensorInfo weightsInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32, 0.0f, 0, true);
2513  const armnn::TensorInfo biasesInfo ({ 1 }, armnn::DataType::Float32, 0.0f, 0, true);
2514 
2515  std::vector<float> weightsData = GenerateRandomData<float>(weightsInfo.GetNumElements());
2516  armnn::ConstTensor weights(weightsInfo, weightsData);
2517 
2518  std::vector<float> biasesData = GenerateRandomData<float>(biasesInfo.GetNumElements());
2519  armnn::ConstTensor biases(biasesInfo, biasesData);
2520 
2522  descriptor.m_PadLeft = 1;
2523  descriptor.m_PadRight = 1;
2524  descriptor.m_PadTop = 1;
2525  descriptor.m_PadBottom = 1;
2526  descriptor.m_StrideX = 1;
2527  descriptor.m_StrideY = 1;
2528  descriptor.m_BiasEnabled = true;
2530 
2532  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2533  armnn::IConnectableLayer* const convLayer =
2534  network->AddTransposeConvolution2dLayer(descriptor,
2535  weights,
2537  layerName.c_str());
2538  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2539 
2540  inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
2541  convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2542 
2543  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2544  convLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2545 
2546  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2547  CHECK(deserializedNetwork);
2548 
2549  const std::vector<armnn::ConstTensor> constants {weights, biases};
2551  layerName, {inputInfo}, {outputInfo}, descriptor, constants);
2552  deserializedNetwork->ExecuteStrategy(verifier);
2553 }
2554 
2555 TEST_CASE("SerializeDeserializeNonLinearNetwork")
2556 {
2557  class ConstantLayerVerifier : public LayerVerifierBase
2558  {
2559  public:
2560  ConstantLayerVerifier(const std::string& layerName,
2561  const std::vector<armnn::TensorInfo>& inputInfos,
2562  const std::vector<armnn::TensorInfo>& outputInfos,
2563  const armnn::ConstTensor& layerInput)
2564  : LayerVerifierBase(layerName, inputInfos, outputInfos)
2565  , m_LayerInput(layerInput) {}
2566 
2567  void ExecuteStrategy(const armnn::IConnectableLayer* layer,
2568  const armnn::BaseDescriptor& descriptor,
2569  const std::vector<armnn::ConstTensor>& constants,
2570  const char* name,
2571  const armnn::LayerBindingId id = 0) override
2572  {
2573  armnn::IgnoreUnused(descriptor, constants, id);
2574  switch (layer->GetType())
2575  {
2576  case armnn::LayerType::Input: break;
2577  case armnn::LayerType::Output: break;
2578  case armnn::LayerType::Addition: break;
2580  {
2581  VerifyNameAndConnections(layer, name);
2582  CompareConstTensor(constants.at(0), m_LayerInput);
2583  break;
2584  }
2585  default:
2586  {
2587  throw armnn::Exception("Unexpected layer type in test model");
2588  }
2589  }
2590  }
2591 
2592  private:
2593  armnn::ConstTensor m_LayerInput;
2594  };
2595 
2596  const std::string layerName("constant");
2597  const armnn::TensorInfo info({ 2, 3 }, armnn::DataType::Float32, 0.0f, 0, true);
2598 
2599  std::vector<float> constantData = GenerateRandomData<float>(info.GetNumElements());
2600  armnn::ConstTensor constTensor(info, constantData);
2601 
2603  armnn::IConnectableLayer* input = network->AddInputLayer(0);
2604  armnn::IConnectableLayer* add = network->AddAdditionLayer();
2605  armnn::IConnectableLayer* constant = network->AddConstantLayer(constTensor, layerName.c_str());
2606  armnn::IConnectableLayer* output = network->AddOutputLayer(0);
2607 
2608  input->GetOutputSlot(0).Connect(add->GetInputSlot(0));
2609  constant->GetOutputSlot(0).Connect(add->GetInputSlot(1));
2610  add->GetOutputSlot(0).Connect(output->GetInputSlot(0));
2611 
2612  input->GetOutputSlot(0).SetTensorInfo(info);
2613  constant->GetOutputSlot(0).SetTensorInfo(info);
2614  add->GetOutputSlot(0).SetTensorInfo(info);
2615 
2616  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2617  CHECK(deserializedNetwork);
2618 
2619  ConstantLayerVerifier verifier(layerName, {}, {info}, constTensor);
2620  deserializedNetwork->ExecuteStrategy(verifier);
2621 }
2622 
2623 }
uint32_t m_PadBottom
Padding bottom value in the height dimension.
bool m_BiasEnabled
Enable/disable bias.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
A ViewsDescriptor for the SplitterLayer.
Interface for a layer that is connectable to other layers via InputSlots and OutputSlots.
Definition: INetwork.hpp:61
float m_ScaleW
Center size encoding scale weight.
bool m_BiasEnabled
Enable/disable bias.
A TransposeConvolution2dDescriptor for the TransposeConvolution2dLayer.
const TensorShape & GetShape() const
Definition: Tensor.hpp:191
uint32_t m_PadBottom
Padding bottom value in the height dimension.
A ReshapeDescriptor for the ReshapeLayer.
armnn::INetworkPtr DeserializeNetwork(const std::string &serializerString)
uint32_t m_PadBack
Padding back value in the depth dimension.
#define ARMNN_NO_DEPRECATE_WARN_BEGIN
Definition: Deprecated.hpp:33
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
A ComparisonDescriptor for the ComparisonLayer.
Definition: Descriptors.hpp:78
float m_ScaleX
Center size encoding scale x.
bool m_TransposeWeightMatrix
Enable/disable transpose weight matrix.
uint32_t m_StrideX
Stride value when proceeding through input for the width dimension.
A Convolution2dDescriptor for the Convolution2dLayer.
uint32_t m_PadLeft
Padding left value in the width dimension.
bool m_BiasEnabled
Enable/disable bias.
ResizeMethod m_Method
The Interpolation method to use (Bilinear, NearestNeighbor).
float m_Gamma
Gamma, the scale scalar value applied for the normalized tensor. Defaults to 1.0. ...
float m_Beta
Exponentiation value.
The padding fields don&#39;t count and are ignored.
float m_Eps
Value to add to the variance. Used to avoid dividing by zero.
ArgMinMaxFunction m_Function
Specify if the function is to find Min or Max.
Definition: Descriptors.hpp:70
uint32_t m_DetectionsPerClass
Detections per classes, used in Regular NMS.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
uint32_t m_PadBottom
Padding bottom value in the height dimension.
bool m_BiasEnabled
Enable/disable bias.
A LogicalBinaryDescriptor for the LogicalBinaryLayer.
uint32_t m_PadRight
Padding right value in the width dimension.
void ExecuteStrategy(const armnn::IConnectableLayer *layer, const armnn::BaseDescriptor &descriptor, const std::vector< armnn::ConstTensor > &constants, const char *name, const armnn::LayerBindingId id=0) override
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
Copyright (c) 2021 ARM Limited and Contributors.
void IgnoreUnused(Ts &&...)
uint32_t m_PadBottom
Padding bottom value in the height dimension.
uint32_t m_DilationY
Dilation along y axis.
int32_t m_EndMask
End mask value.
A SpaceToDepthDescriptor for the SpaceToDepthLayer.
uint32_t m_DilationX
Dilation along x axis.
uint32_t m_DilationY
Dilation factor value for height dimension.
A BatchToSpaceNdDescriptor for the BatchToSpaceNdLayer.
int LayerBindingId
Type of identifiers for bindable layers (inputs, outputs).
Definition: Types.hpp:277
virtual void SetTensorInfo(const TensorInfo &tensorInfo)=0
uint32_t m_StrideY
Stride value when proceeding through input for the height dimension.
A ResizeBilinearDescriptor for the ResizeBilinearLayer.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
uint32_t m_MaxClassesPerDetection
Maximum numbers of classes per detection, used in Fast NMS.
Base class for all descriptors.
Definition: Descriptors.hpp:22
std::vector< unsigned int > m_Axis
Values for the dimensions to reduce.
A StackDescriptor for the StackLayer.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
constexpr char const * GetUnaryOperationAsCString(UnaryOperation operation)
Definition: TypesUtils.hpp:71
uint32_t m_PadTop
Padding top value in the height dimension.
uint32_t m_MaxDetections
Maximum numbers of detections.
A PadDescriptor for the PadLayer.
uint32_t m_StrideX
Stride value when proceeding through input for the width dimension.
uint32_t m_StrideX
Stride value when proceeding through input for the width dimension.
DataType
Definition: Types.hpp:35
float m_NmsIouThreshold
Intersection over union threshold.
#define ARMNN_NO_DEPRECATE_WARN_END
Definition: Deprecated.hpp:34
uint32_t m_DilationX
Dilation factor value for width dimension.
uint32_t m_PadTop
Padding top value in the height dimension.
Status SetViewSize(uint32_t view, uint32_t coord, uint32_t value)
Set the size of the views.
void ExecuteStrategy(const armnn::IConnectableLayer *layer, const armnn::BaseDescriptor &descriptor, const std::vector< armnn::ConstTensor > &constants, const char *name, const armnn::LayerBindingId id=0) override
A L2NormalizationDescriptor for the L2NormalizationLayer.
void VerifyNameAndConnections(const armnn::IConnectableLayer *layer, const char *name)
An ArgMinMaxDescriptor for ArgMinMaxLayer.
Definition: Descriptors.hpp:56
An OriginsDescriptor for the ConcatLayer.
A ReduceDescriptor for the REDUCE operators.
A FullyConnectedDescriptor for the FullyConnectedLayer.
bool m_BiasEnabled
Enable/disable bias.
A tensor defined by a TensorInfo (shape and data type) and an immutable backing store.
Definition: Tensor.hpp:327
uint32_t m_TargetWidth
Target width value.
A GatherDescriptor for the GatherLayer.
uint32_t m_NumClasses
Number of classes.
bool m_HalfPixelCenters
Half Pixel Centers.
uint32_t m_PadTop
Padding top value in the height dimension.
A StandInDescriptor for the StandIn layer.
LayerVerifierBase(const std::string &layerName, const std::vector< armnn::TensorInfo > &inputInfos, const std::vector< armnn::TensorInfo > &outputInfos)
bool m_UseRegularNms
Use Regular NMS.
uint32_t m_PadFront
Padding front value in the depth dimension.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
uint32_t m_PadLeft
Padding left value in the width dimension.
uint32_t m_TargetHeight
Target height value.
A SliceDescriptor for the SliceLayer.
uint32_t m_StrideY
Stride value when proceeding through input for the height dimension.
A Convolution3dDescriptor for the Convolution3dLayer.
uint32_t m_PadRight
Padding right value in the width dimension.
virtual LayerType GetType() const =0
Returns the armnn::LayerType of this layer.
unsigned int m_BlockSize
Scalar specifying the input block size. It must be >= 1.
PaddingMode m_PaddingMode
Specifies the Padding mode (Constant, Reflect or Symmetric)
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
std::vector< uint32_t > m_vAxis
The indices of the dimensions to reduce.
float m_ScaleH
Center size encoding scale height.
A SpaceToBatchNdDescriptor for the SpaceToBatchNdLayer.
DataLayout m_DataLayout
The data layout to be used (NDHWC, NCDHW).
uint32_t m_DilationX
Dilation along x axis.
uint32_t m_PadLeft
Padding left value in the width dimension.
EmptyOptional is used to initialize the Optional class in case we want to have default value for an O...
Definition: Optional.hpp:32
bool m_AlignCorners
Aligned corners.
uint32_t m_StrideX
Stride value when proceeding through input for the width dimension.
int32_t m_Axis
The axis in params to gather indices from.
A ElementwiseUnaryDescriptor for the ElementwiseUnaryLayer.
Definition: Descriptors.hpp:98
uint32_t m_StrideY
Stride value when proceeding through input for the height dimension.
uint32_t m_StrideY
Stride value when proceeding through input for the height dimension.
TEST_SUITE("SerializerTests")
Base class for all ArmNN exceptions so that users can filter to just those.
Definition: Exceptions.hpp:46
uint32_t m_PadTop
Padding top value in the height dimension.
void CompareConstTensor(const armnn::ConstTensor &tensor1, const armnn::ConstTensor &tensor2)
virtual const IInputSlot & GetInputSlot(unsigned int index) const =0
Get a const input slot handle by slot index.
A MeanDescriptor for the MeanLayer.
UnaryOperation
Definition: Types.hpp:111
uint32_t m_PadRight
Padding right value in the width dimension.
A TransposeDescriptor for the TransposeLayer.
A StridedSliceDescriptor for the StridedSliceLayer.
virtual const IOutputSlot & GetOutputSlot(unsigned int index) const =0
Get the const output slot handle by slot index.
float m_ScaleY
Center size encoding scale y.
OriginsDescriptor CreateDescriptorForConcatenation(TensorShapeIt first, TensorShapeIt last, unsigned int concatenationDimension)
Convenience template to create an OriginsDescriptor to use when creating a ConcatLayer for performing...
float m_NmsScoreThreshold
NMS score threshold.
std::unique_ptr< INetwork, void(*)(INetwork *network)> INetworkPtr
Definition: INetwork.hpp:197
virtual int Connect(IInputSlot &destination)=0
A Pooling2dDescriptor for the Pooling2dLayer.
std::string SerializeNetwork(const armnn::INetwork &network)
A NormalizationDescriptor for the NormalizationLayer.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
An InstanceNormalizationDescriptor for InstanceNormalizationLayer.
A ChannelShuffleDescriptor for the ChannelShuffle operator.
static INetworkPtr Create(NetworkOptions networkOptions={})
Definition: Network.cpp:478
uint32_t m_DilationZ
Dilation along z axis.
A SoftmaxDescriptor for the SoftmaxLayer.
uint32_t m_StrideZ
Stride value when proceeding through input for the depth dimension.
Status SetViewOriginCoord(uint32_t view, uint32_t coord, uint32_t value)
Set the view origin coordinates.
A DepthwiseConvolution2dDescriptor for the DepthwiseConvolution2dLayer.
uint32_t m_DilationY
Dilation along y axis.
A FillDescriptor for the FillLayer.
A BatchNormalizationDescriptor for the BatchNormalizationLayer.
uint32_t m_PadLeft
Padding left value in the width dimension.
void ExecuteStrategy(const armnn::IConnectableLayer *layer, const armnn::BaseDescriptor &descriptor, const std::vector< armnn::ConstTensor > &constants, const char *name, const armnn::LayerBindingId id=0) override
A PermuteDescriptor for the PermuteLayer.
uint32_t m_PadRight
Padding right value in the width dimension.
bool m_ConstantWeights
Enable/disable constant weights and biases.