// ArmNN 21.08 — SerializerTests.cpp (source listing recovered from the generated documentation pages).
1 //
2 // Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
3 // SPDX-License-Identifier: MIT
4 //
5 
6 #include "../Serializer.hpp"
8 
9 #include <armnn/Descriptors.hpp>
10 #include <armnn/INetwork.hpp>
11 #include <armnn/TypesUtils.hpp>
12 #include <armnn/LstmParams.hpp>
16 
17 #include <random>
18 #include <vector>
19 
20 #include <doctest/doctest.h>
21 
23 
24 TEST_SUITE("SerializerTests")
25 {
26 
27 TEST_CASE("SerializeAddition")
28 {
29  const std::string layerName("addition");
30  const armnn::TensorInfo tensorInfo({1, 2, 3}, armnn::DataType::Float32);
31 
33  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
34  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
35  armnn::IConnectableLayer* const additionLayer = network->AddAdditionLayer(layerName.c_str());
36  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
37 
38  inputLayer0->GetOutputSlot(0).Connect(additionLayer->GetInputSlot(0));
39  inputLayer1->GetOutputSlot(0).Connect(additionLayer->GetInputSlot(1));
40  additionLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
41 
42  inputLayer0->GetOutputSlot(0).SetTensorInfo(tensorInfo);
43  inputLayer1->GetOutputSlot(0).SetTensorInfo(tensorInfo);
44  additionLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
45 
46  std::string serializedNetwork = SerializeNetwork(*network);
47  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(serializedNetwork);
48  CHECK(deserializedNetwork);
49 
50  LayerVerifierBase verifier(layerName, {tensorInfo, tensorInfo}, {tensorInfo});
51  deserializedNetwork->ExecuteStrategy(verifier);
52 }
53 
54 void SerializeArgMinMaxTest(armnn::DataType dataType)
55 {
56  const std::string layerName("argminmax");
57  const armnn::TensorInfo inputInfo({1, 2, 3}, armnn::DataType::Float32);
58  const armnn::TensorInfo outputInfo({1, 3}, dataType);
59 
60  armnn::ArgMinMaxDescriptor descriptor;
62  descriptor.m_Axis = 1;
63 
65  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
66  armnn::IConnectableLayer* const argMinMaxLayer = network->AddArgMinMaxLayer(descriptor, layerName.c_str());
67  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
68 
69  inputLayer->GetOutputSlot(0).Connect(argMinMaxLayer->GetInputSlot(0));
70  argMinMaxLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
71 
72  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
73  argMinMaxLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
74 
75  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
76  CHECK(deserializedNetwork);
77 
79  {inputInfo},
80  {outputInfo},
81  descriptor);
82  deserializedNetwork->ExecuteStrategy(verifier);
83 }
84 
85 TEST_CASE("SerializeArgMinMaxSigned32")
86 {
87  SerializeArgMinMaxTest(armnn::DataType::Signed32);
88 }
89 
90 TEST_CASE("SerializeArgMinMaxSigned64")
91 {
92  SerializeArgMinMaxTest(armnn::DataType::Signed64);
93 }
94 
95 TEST_CASE("SerializeBatchNormalization")
96 {
97  const std::string layerName("batchNormalization");
98  const armnn::TensorInfo inputInfo ({ 1, 3, 3, 1 }, armnn::DataType::Float32);
99  const armnn::TensorInfo outputInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32);
100 
101  const armnn::TensorInfo meanInfo({1}, armnn::DataType::Float32);
102  const armnn::TensorInfo varianceInfo({1}, armnn::DataType::Float32);
103  const armnn::TensorInfo betaInfo({1}, armnn::DataType::Float32);
104  const armnn::TensorInfo gammaInfo({1}, armnn::DataType::Float32);
105 
107  descriptor.m_Eps = 0.0010000000475f;
108  descriptor.m_DataLayout = armnn::DataLayout::NHWC;
109 
110  std::vector<float> meanData({5.0});
111  std::vector<float> varianceData({2.0});
112  std::vector<float> betaData({1.0});
113  std::vector<float> gammaData({0.0});
114 
115  std::vector<armnn::ConstTensor> constants;
116  constants.emplace_back(armnn::ConstTensor(meanInfo, meanData));
117  constants.emplace_back(armnn::ConstTensor(varianceInfo, varianceData));
118  constants.emplace_back(armnn::ConstTensor(betaInfo, betaData));
119  constants.emplace_back(armnn::ConstTensor(gammaInfo, gammaData));
120 
122  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
123  armnn::IConnectableLayer* const batchNormalizationLayer =
124  network->AddBatchNormalizationLayer(descriptor,
125  constants[0],
126  constants[1],
127  constants[2],
128  constants[3],
129  layerName.c_str());
130  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
131 
132  inputLayer->GetOutputSlot(0).Connect(batchNormalizationLayer->GetInputSlot(0));
133  batchNormalizationLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
134 
135  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
136  batchNormalizationLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
137 
138  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
139  CHECK(deserializedNetwork);
140 
142  layerName, {inputInfo}, {outputInfo}, descriptor, constants);
143  deserializedNetwork->ExecuteStrategy(verifier);
144 }
145 
146 TEST_CASE("SerializeBatchToSpaceNd")
147 {
148  const std::string layerName("spaceToBatchNd");
149  const armnn::TensorInfo inputInfo({4, 1, 2, 2}, armnn::DataType::Float32);
150  const armnn::TensorInfo outputInfo({1, 1, 4, 4}, armnn::DataType::Float32);
151 
154  desc.m_BlockShape = {2, 2};
155  desc.m_Crops = {{0, 0}, {0, 0}};
156 
158  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
159  armnn::IConnectableLayer* const batchToSpaceNdLayer = network->AddBatchToSpaceNdLayer(desc, layerName.c_str());
160  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
161 
162  inputLayer->GetOutputSlot(0).Connect(batchToSpaceNdLayer->GetInputSlot(0));
163  batchToSpaceNdLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
164 
165  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
166  batchToSpaceNdLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
167 
168  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
169  CHECK(deserializedNetwork);
170 
172  {inputInfo},
173  {outputInfo},
174  desc);
175  deserializedNetwork->ExecuteStrategy(verifier);
176 }
177 
178 TEST_CASE("SerializeCast")
179 {
180  const std::string layerName("cast");
181 
182  const armnn::TensorShape shape{1, 5, 2, 3};
183 
186 
188  armnn::IConnectableLayer* inputLayer = network->AddInputLayer(0);
189  armnn::IConnectableLayer* castLayer = network->AddCastLayer(layerName.c_str());
190  armnn::IConnectableLayer* outputLayer = network->AddOutputLayer(0);
191 
192  inputLayer->GetOutputSlot(0).Connect(castLayer->GetInputSlot(0));
193  castLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
194 
195  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
196  castLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
197 
198  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
199  CHECK(deserializedNetwork);
200 
201  LayerVerifierBase verifier(layerName, {inputInfo}, {outputInfo});
202  deserializedNetwork->ExecuteStrategy(verifier);
203 }
204 
205 TEST_CASE("SerializeComparison")
206 {
207  const std::string layerName("comparison");
208 
209  const armnn::TensorShape shape{2, 1, 2, 4};
210 
213 
215 
217  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
218  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
219  armnn::IConnectableLayer* const comparisonLayer = network->AddComparisonLayer(descriptor, layerName.c_str());
220  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
221 
222  inputLayer0->GetOutputSlot(0).Connect(comparisonLayer->GetInputSlot(0));
223  inputLayer1->GetOutputSlot(0).Connect(comparisonLayer->GetInputSlot(1));
224  comparisonLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
225 
226  inputLayer0->GetOutputSlot(0).SetTensorInfo(inputInfo);
227  inputLayer1->GetOutputSlot(0).SetTensorInfo(inputInfo);
228  comparisonLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
229 
230  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
231  CHECK(deserializedNetwork);
232 
234  { inputInfo, inputInfo },
235  { outputInfo },
236  descriptor);
237  deserializedNetwork->ExecuteStrategy(verifier);
238 }
239 
240 TEST_CASE("SerializeConstant")
241 {
242  class ConstantLayerVerifier : public LayerVerifierBase
243  {
244  public:
245  ConstantLayerVerifier(const std::string& layerName,
246  const std::vector<armnn::TensorInfo>& inputInfos,
247  const std::vector<armnn::TensorInfo>& outputInfos,
248  const std::vector<armnn::ConstTensor>& constants)
249  : LayerVerifierBase(layerName, inputInfos, outputInfos)
250  , m_Constants(constants) {}
251 
252  void ExecuteStrategy(const armnn::IConnectableLayer* layer,
253  const armnn::BaseDescriptor& descriptor,
254  const std::vector<armnn::ConstTensor>& constants,
255  const char* name,
256  const armnn::LayerBindingId id = 0) override
257  {
258  armnn::IgnoreUnused(descriptor, id);
259 
260  switch (layer->GetType())
261  {
262  case armnn::LayerType::Input: break;
263  case armnn::LayerType::Output: break;
264  case armnn::LayerType::Addition: break;
265  default:
266  {
267  this->VerifyNameAndConnections(layer, name);
268 
269  for (std::size_t i = 0; i < constants.size(); i++)
270  {
271  CompareConstTensor(constants[i], m_Constants[i]);
272  }
273  }
274  }
275  }
276 
277  private:
278  const std::vector<armnn::ConstTensor> m_Constants;
279  };
280 
281  const std::string layerName("constant");
282  const armnn::TensorInfo info({ 2, 3 }, armnn::DataType::Float32);
283 
284  std::vector<float> constantData = GenerateRandomData<float>(info.GetNumElements());
285  armnn::ConstTensor constTensor(info, constantData);
286 
288  armnn::IConnectableLayer* input = network->AddInputLayer(0);
289  armnn::IConnectableLayer* constant = network->AddConstantLayer(constTensor, layerName.c_str());
290  armnn::IConnectableLayer* add = network->AddAdditionLayer();
291  armnn::IConnectableLayer* output = network->AddOutputLayer(0);
292 
293  input->GetOutputSlot(0).Connect(add->GetInputSlot(0));
294  constant->GetOutputSlot(0).Connect(add->GetInputSlot(1));
295  add->GetOutputSlot(0).Connect(output->GetInputSlot(0));
296 
297  input->GetOutputSlot(0).SetTensorInfo(info);
298  constant->GetOutputSlot(0).SetTensorInfo(info);
299  add->GetOutputSlot(0).SetTensorInfo(info);
300 
301  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
302  CHECK(deserializedNetwork);
303 
304  ConstantLayerVerifier verifier(layerName, {}, {info}, {constTensor});
305  deserializedNetwork->ExecuteStrategy(verifier);
306 }
307 
308 TEST_CASE("SerializeConvolution2d")
309 {
310  const std::string layerName("convolution2d");
311  const armnn::TensorInfo inputInfo ({ 1, 5, 5, 1 }, armnn::DataType::Float32);
312  const armnn::TensorInfo outputInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32);
313 
314  const armnn::TensorInfo weightsInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32);
315  const armnn::TensorInfo biasesInfo ({ 1 }, armnn::DataType::Float32);
316 
317  std::vector<float> weightsData = GenerateRandomData<float>(weightsInfo.GetNumElements());
318  armnn::ConstTensor weights(weightsInfo, weightsData);
319 
320  std::vector<float> biasesData = GenerateRandomData<float>(biasesInfo.GetNumElements());
321  armnn::ConstTensor biases(biasesInfo, biasesData);
322 
324  descriptor.m_PadLeft = 1;
325  descriptor.m_PadRight = 1;
326  descriptor.m_PadTop = 1;
327  descriptor.m_PadBottom = 1;
328  descriptor.m_StrideX = 2;
329  descriptor.m_StrideY = 2;
330  descriptor.m_DilationX = 2;
331  descriptor.m_DilationY = 2;
332  descriptor.m_BiasEnabled = true;
334 
336  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
337  armnn::IConnectableLayer* const convLayer =
338  network->AddConvolution2dLayer(descriptor,
339  weights,
341  layerName.c_str());
342  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
343 
344  inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
345  convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
346 
347  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
348  convLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
349 
350  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
351  CHECK(deserializedNetwork);
352 
353  const std::vector<armnn::ConstTensor>& constants {weights, biases};
355  layerName, {inputInfo}, {outputInfo}, descriptor, constants);
356  deserializedNetwork->ExecuteStrategy(verifier);
357 }
358 
359 TEST_CASE("SerializeConvolution2dWithPerAxisParams")
360 {
361  using namespace armnn;
362 
363  const std::string layerName("convolution2dWithPerAxis");
364  const TensorInfo inputInfo ({ 1, 3, 1, 2 }, DataType::QAsymmU8, 0.55f, 128);
365  const TensorInfo outputInfo({ 1, 3, 1, 3 }, DataType::QAsymmU8, 0.75f, 128);
366 
367  const std::vector<float> quantScales{ 0.75f, 0.65f, 0.85f };
368  constexpr unsigned int quantDimension = 0;
369 
370  const TensorInfo kernelInfo({ 3, 1, 1, 2 }, DataType::QSymmS8, quantScales, quantDimension);
371 
372  const std::vector<float> biasQuantScales{ 0.25f, 0.50f, 0.75f };
373  const TensorInfo biasInfo({ 3 }, DataType::Signed32, biasQuantScales, quantDimension);
374 
375  std::vector<int8_t> kernelData = GenerateRandomData<int8_t>(kernelInfo.GetNumElements());
376  armnn::ConstTensor weights(kernelInfo, kernelData);
377  std::vector<int32_t> biasData = GenerateRandomData<int32_t>(biasInfo.GetNumElements());
378  armnn::ConstTensor biases(biasInfo, biasData);
379 
380  Convolution2dDescriptor descriptor;
381  descriptor.m_StrideX = 1;
382  descriptor.m_StrideY = 1;
383  descriptor.m_PadLeft = 0;
384  descriptor.m_PadRight = 0;
385  descriptor.m_PadTop = 0;
386  descriptor.m_PadBottom = 0;
387  descriptor.m_BiasEnabled = true;
389 
391  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
392  armnn::IConnectableLayer* const convLayer =
393  network->AddConvolution2dLayer(descriptor,
394  weights,
396  layerName.c_str());
397  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
398 
399  inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
400  convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
401 
402  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
403  convLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
404 
405  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
406  CHECK(deserializedNetwork);
407 
408  const std::vector<armnn::ConstTensor>& constants {weights, biases};
410  layerName, {inputInfo}, {outputInfo}, descriptor, constants);
411  deserializedNetwork->ExecuteStrategy(verifier);
412 }
413 
414 TEST_CASE("SerializeDepthToSpace")
415 {
416  const std::string layerName("depthToSpace");
417 
418  const armnn::TensorInfo inputInfo ({ 1, 8, 4, 12 }, armnn::DataType::Float32);
419  const armnn::TensorInfo outputInfo({ 1, 16, 8, 3 }, armnn::DataType::Float32);
420 
422  desc.m_BlockSize = 2;
423  desc.m_DataLayout = armnn::DataLayout::NHWC;
424 
426  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
427  armnn::IConnectableLayer* const depthToSpaceLayer = network->AddDepthToSpaceLayer(desc, layerName.c_str());
428  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
429 
430  inputLayer->GetOutputSlot(0).Connect(depthToSpaceLayer->GetInputSlot(0));
431  depthToSpaceLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
432 
433  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
434  depthToSpaceLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
435 
436  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
437  CHECK(deserializedNetwork);
438 
439  LayerVerifierBaseWithDescriptor<armnn::DepthToSpaceDescriptor> verifier(layerName, {inputInfo}, {outputInfo}, desc);
440  deserializedNetwork->ExecuteStrategy(verifier);
441 }
442 
443 TEST_CASE("SerializeDepthwiseConvolution2d")
444 {
445  const std::string layerName("depwiseConvolution2d");
446  const armnn::TensorInfo inputInfo ({ 1, 5, 5, 3 }, armnn::DataType::Float32);
447  const armnn::TensorInfo outputInfo({ 1, 3, 3, 3 }, armnn::DataType::Float32);
448 
449  const armnn::TensorInfo weightsInfo({ 1, 3, 3, 3 }, armnn::DataType::Float32);
450  const armnn::TensorInfo biasesInfo ({ 3 }, armnn::DataType::Float32);
451 
452  std::vector<float> weightsData = GenerateRandomData<float>(weightsInfo.GetNumElements());
453  armnn::ConstTensor weights(weightsInfo, weightsData);
454 
455  std::vector<int32_t> biasesData = GenerateRandomData<int32_t>(biasesInfo.GetNumElements());
456  armnn::ConstTensor biases(biasesInfo, biasesData);
457 
459  descriptor.m_PadLeft = 1;
460  descriptor.m_PadRight = 1;
461  descriptor.m_PadTop = 1;
462  descriptor.m_PadBottom = 1;
463  descriptor.m_StrideX = 2;
464  descriptor.m_StrideY = 2;
465  descriptor.m_DilationX = 2;
466  descriptor.m_DilationY = 2;
467  descriptor.m_BiasEnabled = true;
469 
471  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
472  armnn::IConnectableLayer* const depthwiseConvLayer =
473  network->AddDepthwiseConvolution2dLayer(descriptor,
474  weights,
476  layerName.c_str());
477  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
478 
479  inputLayer->GetOutputSlot(0).Connect(depthwiseConvLayer->GetInputSlot(0));
480  depthwiseConvLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
481 
482  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
483  depthwiseConvLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
484 
485  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
486  CHECK(deserializedNetwork);
487 
488  const std::vector<armnn::ConstTensor>& constants {weights, biases};
490  layerName, {inputInfo}, {outputInfo}, descriptor, constants);
491  deserializedNetwork->ExecuteStrategy(verifier);
492 }
493 
494 TEST_CASE("SerializeDepthwiseConvolution2dWithPerAxisParams")
495 {
496  using namespace armnn;
497 
498  const std::string layerName("depwiseConvolution2dWithPerAxis");
499  const TensorInfo inputInfo ({ 1, 3, 3, 2 }, DataType::QAsymmU8, 0.55f, 128);
500  const TensorInfo outputInfo({ 1, 2, 2, 4 }, DataType::QAsymmU8, 0.75f, 128);
501 
502  const std::vector<float> quantScales{ 0.75f, 0.80f, 0.90f, 0.95f };
503  const unsigned int quantDimension = 0;
504  TensorInfo kernelInfo({ 2, 2, 2, 2 }, DataType::QSymmS8, quantScales, quantDimension);
505 
506  const std::vector<float> biasQuantScales{ 0.25f, 0.35f, 0.45f, 0.55f };
507  constexpr unsigned int biasQuantDimension = 0;
508  TensorInfo biasInfo({ 4 }, DataType::Signed32, biasQuantScales, biasQuantDimension);
509 
510  std::vector<int8_t> kernelData = GenerateRandomData<int8_t>(kernelInfo.GetNumElements());
511  armnn::ConstTensor weights(kernelInfo, kernelData);
512  std::vector<int32_t> biasData = GenerateRandomData<int32_t>(biasInfo.GetNumElements());
513  armnn::ConstTensor biases(biasInfo, biasData);
514 
516  descriptor.m_StrideX = 1;
517  descriptor.m_StrideY = 1;
518  descriptor.m_PadLeft = 0;
519  descriptor.m_PadRight = 0;
520  descriptor.m_PadTop = 0;
521  descriptor.m_PadBottom = 0;
522  descriptor.m_DilationX = 1;
523  descriptor.m_DilationY = 1;
524  descriptor.m_BiasEnabled = true;
526 
528  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
529  armnn::IConnectableLayer* const depthwiseConvLayer =
530  network->AddDepthwiseConvolution2dLayer(descriptor,
531  weights,
533  layerName.c_str());
534  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
535 
536  inputLayer->GetOutputSlot(0).Connect(depthwiseConvLayer->GetInputSlot(0));
537  depthwiseConvLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
538 
539  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
540  depthwiseConvLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
541 
542  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
543  CHECK(deserializedNetwork);
544 
545  const std::vector<armnn::ConstTensor>& constants {weights, biases};
547  layerName, {inputInfo}, {outputInfo}, descriptor, constants);
548  deserializedNetwork->ExecuteStrategy(verifier);
549 }
550 
551 TEST_CASE("SerializeDequantize")
552 {
553  const std::string layerName("dequantize");
554  const armnn::TensorInfo inputInfo({ 1, 5, 2, 3 }, armnn::DataType::QAsymmU8, 0.5f, 1);
555  const armnn::TensorInfo outputInfo({ 1, 5, 2, 3 }, armnn::DataType::Float32);
556 
558  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
559  armnn::IConnectableLayer* const dequantizeLayer = network->AddDequantizeLayer(layerName.c_str());
560  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
561 
562  inputLayer->GetOutputSlot(0).Connect(dequantizeLayer->GetInputSlot(0));
563  dequantizeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
564 
565  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
566  dequantizeLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
567 
568  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
569  CHECK(deserializedNetwork);
570 
571  LayerVerifierBase verifier(layerName, {inputInfo}, {outputInfo});
572  deserializedNetwork->ExecuteStrategy(verifier);
573 }
574 
575 TEST_CASE("SerializeDeserializeDetectionPostProcess")
576 {
577  const std::string layerName("detectionPostProcess");
578 
579  const std::vector<armnn::TensorInfo> inputInfos({
582  });
583 
584  const std::vector<armnn::TensorInfo> outputInfos({
589  });
590 
592  descriptor.m_UseRegularNms = true;
593  descriptor.m_MaxDetections = 3;
594  descriptor.m_MaxClassesPerDetection = 1;
595  descriptor.m_DetectionsPerClass =1;
596  descriptor.m_NmsScoreThreshold = 0.0;
597  descriptor.m_NmsIouThreshold = 0.5;
598  descriptor.m_NumClasses = 2;
599  descriptor.m_ScaleY = 10.0;
600  descriptor.m_ScaleX = 10.0;
601  descriptor.m_ScaleH = 5.0;
602  descriptor.m_ScaleW = 5.0;
603 
604  const armnn::TensorInfo anchorsInfo({ 6, 4 }, armnn::DataType::Float32);
605  const std::vector<float> anchorsData({
606  0.5f, 0.5f, 1.0f, 1.0f,
607  0.5f, 0.5f, 1.0f, 1.0f,
608  0.5f, 0.5f, 1.0f, 1.0f,
609  0.5f, 10.5f, 1.0f, 1.0f,
610  0.5f, 10.5f, 1.0f, 1.0f,
611  0.5f, 100.5f, 1.0f, 1.0f
612  });
613  armnn::ConstTensor anchors(anchorsInfo, anchorsData);
614 
616  armnn::IConnectableLayer* const detectionLayer =
617  network->AddDetectionPostProcessLayer(descriptor, anchors, layerName.c_str());
618 
619  for (unsigned int i = 0; i < 2; i++)
620  {
621  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(static_cast<int>(i));
622  inputLayer->GetOutputSlot(0).Connect(detectionLayer->GetInputSlot(i));
623  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfos[i]);
624  }
625 
626  for (unsigned int i = 0; i < 4; i++)
627  {
628  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(static_cast<int>(i));
629  detectionLayer->GetOutputSlot(i).Connect(outputLayer->GetInputSlot(0));
630  detectionLayer->GetOutputSlot(i).SetTensorInfo(outputInfos[i]);
631  }
632 
633  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
634  CHECK(deserializedNetwork);
635 
636  const std::vector<armnn::ConstTensor>& constants {anchors};
638  layerName, inputInfos, outputInfos, descriptor, constants);
639  deserializedNetwork->ExecuteStrategy(verifier);
640 }
641 
642 TEST_CASE("SerializeDivision")
643 {
644  const std::string layerName("division");
645  const armnn::TensorInfo info({ 1, 5, 2, 3 }, armnn::DataType::Float32);
646 
648  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
649  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
650  armnn::IConnectableLayer* const divisionLayer = network->AddDivisionLayer(layerName.c_str());
651  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
652 
653  inputLayer0->GetOutputSlot(0).Connect(divisionLayer->GetInputSlot(0));
654  inputLayer1->GetOutputSlot(0).Connect(divisionLayer->GetInputSlot(1));
655  divisionLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
656 
657  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
658  inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
659  divisionLayer->GetOutputSlot(0).SetTensorInfo(info);
660 
661  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
662  CHECK(deserializedNetwork);
663 
664  LayerVerifierBase verifier(layerName, {info, info}, {info});
665  deserializedNetwork->ExecuteStrategy(verifier);
666 }
667 
668 TEST_CASE("SerializeDeserializeEqual")
669 {
670  const std::string layerName("EqualLayer");
671  const armnn::TensorInfo inputTensorInfo1 = armnn::TensorInfo({2, 1, 2, 4}, armnn::DataType::Float32);
672  const armnn::TensorInfo inputTensorInfo2 = armnn::TensorInfo({2, 1, 2, 4}, armnn::DataType::Float32);
673  const armnn::TensorInfo outputTensorInfo = armnn::TensorInfo({2, 1, 2, 4}, armnn::DataType::Boolean);
674 
676  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(0);
677  armnn::IConnectableLayer* const inputLayer2 = network->AddInputLayer(1);
679  armnn::IConnectableLayer* const equalLayer = network->AddEqualLayer(layerName.c_str());
681  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
682 
683  inputLayer1->GetOutputSlot(0).Connect(equalLayer->GetInputSlot(0));
684  inputLayer1->GetOutputSlot(0).SetTensorInfo(inputTensorInfo1);
685  inputLayer2->GetOutputSlot(0).Connect(equalLayer->GetInputSlot(1));
686  inputLayer2->GetOutputSlot(0).SetTensorInfo(inputTensorInfo2);
687  equalLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
688  equalLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
689 
690  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
691  CHECK(deserializedNetwork);
692 
693  LayerVerifierBase verifier(layerName, {inputTensorInfo1, inputTensorInfo2}, {outputTensorInfo});
694  deserializedNetwork->ExecuteStrategy(verifier);
695 }
696 
697 void SerializeElementwiseUnaryTest(armnn::UnaryOperation unaryOperation)
698 {
699  auto layerName = GetUnaryOperationAsCString(unaryOperation);
700 
701  const armnn::TensorShape shape{2, 1, 2, 2};
702 
705 
706  armnn::ElementwiseUnaryDescriptor descriptor(unaryOperation);
707 
709  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
710  armnn::IConnectableLayer* const elementwiseUnaryLayer =
711  network->AddElementwiseUnaryLayer(descriptor, layerName);
712  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
713 
714  inputLayer->GetOutputSlot(0).Connect(elementwiseUnaryLayer->GetInputSlot(0));
715  elementwiseUnaryLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
716 
717  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
718  elementwiseUnaryLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
719 
720  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
721 
722  CHECK(deserializedNetwork);
723 
725  verifier(layerName, { inputInfo }, { outputInfo }, descriptor);
726 
727  deserializedNetwork->ExecuteStrategy(verifier);
728 }
729 
730 TEST_CASE("SerializeElementwiseUnary")
731 {
732  using op = armnn::UnaryOperation;
733  std::initializer_list<op> allUnaryOperations = {op::Abs, op::Exp, op::Sqrt, op::Rsqrt, op::Neg,
734  op::LogicalNot, op::Log, op::Sin};
735 
736  for (auto unaryOperation : allUnaryOperations)
737  {
738  SerializeElementwiseUnaryTest(unaryOperation);
739  }
740 }
741 
742 TEST_CASE("SerializeFill")
743 {
744  const std::string layerName("fill");
745  const armnn::TensorInfo inputInfo({4}, armnn::DataType::Signed32);
746  const armnn::TensorInfo outputInfo({1, 3, 3, 1}, armnn::DataType::Float32);
747 
748  armnn::FillDescriptor descriptor(1.0f);
749 
751  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
752  armnn::IConnectableLayer* const fillLayer = network->AddFillLayer(descriptor, layerName.c_str());
753  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
754 
755  inputLayer->GetOutputSlot(0).Connect(fillLayer->GetInputSlot(0));
756  fillLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
757 
758  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
759  fillLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
760 
761  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
762  CHECK(deserializedNetwork);
763 
764  LayerVerifierBaseWithDescriptor<armnn::FillDescriptor> verifier(layerName, {inputInfo}, {outputInfo}, descriptor);
765 
766  deserializedNetwork->ExecuteStrategy(verifier);
767 }
768 
769 TEST_CASE("SerializeFloor")
770 {
771  const std::string layerName("floor");
773 
775  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
776  armnn::IConnectableLayer* const floorLayer = network->AddFloorLayer(layerName.c_str());
777  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
778 
779  inputLayer->GetOutputSlot(0).Connect(floorLayer->GetInputSlot(0));
780  floorLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
781 
782  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
783  floorLayer->GetOutputSlot(0).SetTensorInfo(info);
784 
785  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
786  CHECK(deserializedNetwork);
787 
788  LayerVerifierBase verifier(layerName, {info}, {info});
789  deserializedNetwork->ExecuteStrategy(verifier);
790 }
791 
793 class FullyConnectedLayerVerifier : public LayerVerifierBaseWithDescriptor<FullyConnectedDescriptor>
794 {
795 public:
796  FullyConnectedLayerVerifier(const std::string& layerName,
797  const std::vector<armnn::TensorInfo>& inputInfos,
798  const std::vector<armnn::TensorInfo>& outputInfos,
799  const FullyConnectedDescriptor& descriptor)
800  : LayerVerifierBaseWithDescriptor<FullyConnectedDescriptor>(layerName, inputInfos, outputInfos, descriptor) {}
801 
802  void ExecuteStrategy(const armnn::IConnectableLayer* layer,
803  const armnn::BaseDescriptor& descriptor,
804  const std::vector<armnn::ConstTensor>& constants,
805  const char* name,
806  const armnn::LayerBindingId id = 0) override
807  {
808  armnn::IgnoreUnused(constants, id);
809  switch (layer->GetType())
810  {
811  case armnn::LayerType::Input: break;
812  case armnn::LayerType::Output: break;
813  case armnn::LayerType::Constant: break;
814  default:
815  {
816  VerifyNameAndConnections(layer, name);
817  const FullyConnectedDescriptor& layerDescriptor =
818  static_cast<const FullyConnectedDescriptor&>(descriptor);
819  CHECK(layerDescriptor.m_ConstantWeights == m_Descriptor.m_ConstantWeights);
820  CHECK(layerDescriptor.m_BiasEnabled == m_Descriptor.m_BiasEnabled);
821  CHECK(layerDescriptor.m_TransposeWeightMatrix == m_Descriptor.m_TransposeWeightMatrix);
822  }
823  }
824  }
825 };
826 
827 TEST_CASE("SerializeFullyConnected")
828 {
829  const std::string layerName("fullyConnected");
830  const armnn::TensorInfo inputInfo ({ 2, 5, 1, 1 }, armnn::DataType::Float32);
831  const armnn::TensorInfo outputInfo({ 2, 3 }, armnn::DataType::Float32);
832 
833  const armnn::TensorInfo weightsInfo({ 5, 3 }, armnn::DataType::Float32);
834  const armnn::TensorInfo biasesInfo ({ 3 }, armnn::DataType::Float32);
835  std::vector<float> weightsData = GenerateRandomData<float>(weightsInfo.GetNumElements());
836  std::vector<float> biasesData = GenerateRandomData<float>(biasesInfo.GetNumElements());
837  armnn::ConstTensor weights(weightsInfo, weightsData);
838  armnn::ConstTensor biases(biasesInfo, biasesData);
839 
841  descriptor.m_BiasEnabled = true;
842  descriptor.m_TransposeWeightMatrix = false;
843  descriptor.m_ConstantWeights = true;
844 
846  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
847 
848  // Old way of handling constant tensors.
850  armnn::IConnectableLayer* const fullyConnectedLayer =
851  network->AddFullyConnectedLayer(descriptor,
852  weights,
854  layerName.c_str());
856 
857  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
858 
859  inputLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(0));
860  fullyConnectedLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
861 
862  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
863  fullyConnectedLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
864 
865  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
866  CHECK(deserializedNetwork);
867 
868  FullyConnectedLayerVerifier verifier(layerName, {inputInfo, weightsInfo, biasesInfo}, {outputInfo}, descriptor);
869  deserializedNetwork->ExecuteStrategy(verifier);
870 }
871 
872 TEST_CASE("SerializeFullyConnectedWeightsAndBiasesAsInputs")
873 {
874  const std::string layerName("fullyConnected_weights_as_inputs");
875  const armnn::TensorInfo inputInfo ({ 2, 5, 1, 1 }, armnn::DataType::Float32);
876  const armnn::TensorInfo outputInfo({ 2, 3 }, armnn::DataType::Float32);
877 
878  const armnn::TensorInfo weightsInfo({ 5, 3 }, armnn::DataType::Float32);
879  const armnn::TensorInfo biasesInfo ({ 3 }, armnn::DataType::Float32);
880 
883 
885  descriptor.m_BiasEnabled = true;
886  descriptor.m_TransposeWeightMatrix = false;
887  descriptor.m_ConstantWeights = false;
888 
890  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
891  armnn::IConnectableLayer* const weightsInputLayer = network->AddInputLayer(1);
892  armnn::IConnectableLayer* const biasInputLayer = network->AddInputLayer(2);
893  armnn::IConnectableLayer* const fullyConnectedLayer =
894  network->AddFullyConnectedLayer(descriptor,
895  layerName.c_str());
896  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
897 
898  inputLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(0));
899  weightsInputLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(1));
900  biasInputLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(2));
901  fullyConnectedLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
902 
903  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
904  weightsInputLayer->GetOutputSlot(0).SetTensorInfo(weightsInfo);
905  biasInputLayer->GetOutputSlot(0).SetTensorInfo(biasesInfo);
906  fullyConnectedLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
907 
908  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
909  CHECK(deserializedNetwork);
910 
911  const std::vector<armnn::ConstTensor> constants {};
913  layerName, {inputInfo, weightsInfo, biasesInfo}, {outputInfo}, descriptor, constants);
914  deserializedNetwork->ExecuteStrategy(verifier);
915 }
916 
917 TEST_CASE("SerializeFullyConnectedWeightsAndBiasesAsConstantLayers")
918 {
919  const std::string layerName("fullyConnected_weights_as_inputs");
920  const armnn::TensorInfo inputInfo ({ 2, 5, 1, 1 }, armnn::DataType::Float32);
921  const armnn::TensorInfo outputInfo({ 2, 3 }, armnn::DataType::Float32);
922 
923  const armnn::TensorInfo weightsInfo({ 5, 3 }, armnn::DataType::Float32);
924  const armnn::TensorInfo biasesInfo ({ 3 }, armnn::DataType::Float32);
925 
926  std::vector<float> weightsData = GenerateRandomData<float>(weightsInfo.GetNumElements());
927  std::vector<float> biasesData = GenerateRandomData<float>(biasesInfo.GetNumElements());
928  armnn::ConstTensor weights(weightsInfo, weightsData);
929  armnn::ConstTensor biases(biasesInfo, biasesData);
930 
932  descriptor.m_BiasEnabled = true;
933  descriptor.m_TransposeWeightMatrix = false;
934  descriptor.m_ConstantWeights = true;
935 
937  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
938  armnn::IConnectableLayer* const weightsLayer = network->AddConstantLayer(weights, "Weights");
939  armnn::IConnectableLayer* const biasesLayer = network->AddConstantLayer(biases, "Biases");
940  armnn::IConnectableLayer* const fullyConnectedLayer = network->AddFullyConnectedLayer(descriptor,layerName.c_str());
941  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
942 
943  inputLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(0));
944  weightsLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(1));
945  biasesLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(2));
946  fullyConnectedLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
947 
948  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
949  weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsInfo);
950  biasesLayer->GetOutputSlot(0).SetTensorInfo(biasesInfo);
951  fullyConnectedLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
952 
953  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
954  CHECK(deserializedNetwork);
955 
956  FullyConnectedLayerVerifier verifier(layerName, {inputInfo, weightsInfo, biasesInfo}, {outputInfo}, descriptor);
957  deserializedNetwork->ExecuteStrategy(verifier);
958 }
959 
960 TEST_CASE("SerializeGather")
961 {
963  class GatherLayerVerifier : public LayerVerifierBaseWithDescriptor<GatherDescriptor>
964  {
965  public:
966  GatherLayerVerifier(const std::string& layerName,
967  const std::vector<armnn::TensorInfo>& inputInfos,
968  const std::vector<armnn::TensorInfo>& outputInfos,
969  const GatherDescriptor& descriptor)
970  : LayerVerifierBaseWithDescriptor<GatherDescriptor>(layerName, inputInfos, outputInfos, descriptor) {}
971 
972  void ExecuteStrategy(const armnn::IConnectableLayer* layer,
973  const armnn::BaseDescriptor& descriptor,
974  const std::vector<armnn::ConstTensor>& constants,
975  const char* name,
976  const armnn::LayerBindingId id = 0) override
977  {
978  armnn::IgnoreUnused(constants, id);
979  switch (layer->GetType())
980  {
981  case armnn::LayerType::Input: break;
982  case armnn::LayerType::Output: break;
983  case armnn::LayerType::Constant: break;
984  default:
985  {
986  VerifyNameAndConnections(layer, name);
987  const GatherDescriptor& layerDescriptor = static_cast<const GatherDescriptor&>(descriptor);
988  CHECK(layerDescriptor.m_Axis == m_Descriptor.m_Axis);
989  }
990  }
991  }
992  };
993 
994  const std::string layerName("gather");
995  armnn::TensorInfo paramsInfo({ 8 }, armnn::DataType::QAsymmU8);
996  armnn::TensorInfo outputInfo({ 3 }, armnn::DataType::QAsymmU8);
997  const armnn::TensorInfo indicesInfo({ 3 }, armnn::DataType::Signed32);
998  GatherDescriptor descriptor;
999  descriptor.m_Axis = 1;
1000 
1001  paramsInfo.SetQuantizationScale(1.0f);
1002  paramsInfo.SetQuantizationOffset(0);
1003  outputInfo.SetQuantizationScale(1.0f);
1004  outputInfo.SetQuantizationOffset(0);
1005 
1006  const std::vector<int32_t>& indicesData = {7, 6, 5};
1007 
1009  armnn::IConnectableLayer *const inputLayer = network->AddInputLayer(0);
1010  armnn::IConnectableLayer *const constantLayer =
1011  network->AddConstantLayer(armnn::ConstTensor(indicesInfo, indicesData));
1012  armnn::IConnectableLayer *const gatherLayer = network->AddGatherLayer(descriptor, layerName.c_str());
1013  armnn::IConnectableLayer *const outputLayer = network->AddOutputLayer(0);
1014 
1015  inputLayer->GetOutputSlot(0).Connect(gatherLayer->GetInputSlot(0));
1016  constantLayer->GetOutputSlot(0).Connect(gatherLayer->GetInputSlot(1));
1017  gatherLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1018 
1019  inputLayer->GetOutputSlot(0).SetTensorInfo(paramsInfo);
1020  constantLayer->GetOutputSlot(0).SetTensorInfo(indicesInfo);
1021  gatherLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1022 
1023  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1024  CHECK(deserializedNetwork);
1025 
1026  GatherLayerVerifier verifier(layerName, {paramsInfo, indicesInfo}, {outputInfo}, descriptor);
1027  deserializedNetwork->ExecuteStrategy(verifier);
1028 }
1029 
1030 
1031 // NOTE: Until the deprecated AddGreaterLayer disappears this test checks that calling
1032 // AddGreaterLayer places a ComparisonLayer into the serialized format and that
1033 // when this deserialises we have a ComparisonLayer
1034 TEST_CASE("SerializeGreaterDeprecated")
1035 {
1036  const std::string layerName("greater");
1037 
1038  const armnn::TensorShape shape{2, 1, 2, 4};
1039 
1042 
1044  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
1045  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
1047  armnn::IConnectableLayer* const equalLayer = network->AddGreaterLayer(layerName.c_str());
1049  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1050 
1051  inputLayer0->GetOutputSlot(0).Connect(equalLayer->GetInputSlot(0));
1052  inputLayer1->GetOutputSlot(0).Connect(equalLayer->GetInputSlot(1));
1053  equalLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1054 
1055  inputLayer0->GetOutputSlot(0).SetTensorInfo(inputInfo);
1056  inputLayer1->GetOutputSlot(0).SetTensorInfo(inputInfo);
1057  equalLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1058 
1059  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1060  CHECK(deserializedNetwork);
1061 
1062  LayerVerifierBase verifier(layerName, { inputInfo, inputInfo }, { outputInfo });
1063  deserializedNetwork->ExecuteStrategy(verifier);
1064 }
1065 
1066 
1067 TEST_CASE("SerializeInstanceNormalization")
1068 {
1069  const std::string layerName("instanceNormalization");
1070  const armnn::TensorInfo info({ 1, 2, 1, 5 }, armnn::DataType::Float32);
1071 
1073  descriptor.m_Gamma = 1.1f;
1074  descriptor.m_Beta = 0.1f;
1075  descriptor.m_Eps = 0.0001f;
1076  descriptor.m_DataLayout = armnn::DataLayout::NHWC;
1077 
1079  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1080  armnn::IConnectableLayer* const instanceNormLayer =
1081  network->AddInstanceNormalizationLayer(descriptor, layerName.c_str());
1082  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1083 
1084  inputLayer->GetOutputSlot(0).Connect(instanceNormLayer->GetInputSlot(0));
1085  instanceNormLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1086 
1087  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
1088  instanceNormLayer->GetOutputSlot(0).SetTensorInfo(info);
1089 
1090  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1091  CHECK(deserializedNetwork);
1092 
1094  layerName, {info}, {info}, descriptor);
1095  deserializedNetwork->ExecuteStrategy(verifier);
1096 }
1097 
1098 TEST_CASE("SerializeL2Normalization")
1099 {
1100  const std::string l2NormLayerName("l2Normalization");
1101  const armnn::TensorInfo info({1, 2, 1, 5}, armnn::DataType::Float32);
1102 
1105  desc.m_Eps = 0.0001f;
1106 
1108  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
1109  armnn::IConnectableLayer* const l2NormLayer = network->AddL2NormalizationLayer(desc, l2NormLayerName.c_str());
1110  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1111 
1112  inputLayer0->GetOutputSlot(0).Connect(l2NormLayer->GetInputSlot(0));
1113  l2NormLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1114 
1115  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
1116  l2NormLayer->GetOutputSlot(0).SetTensorInfo(info);
1117 
1118  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1119  CHECK(deserializedNetwork);
1120 
1122  l2NormLayerName, {info}, {info}, desc);
1123  deserializedNetwork->ExecuteStrategy(verifier);
1124 }
1125 
1126 TEST_CASE("EnsureL2NormalizationBackwardCompatibility")
1127 {
1128  // The hex data below is a flat buffer containing a simple network with one input
1129  // a L2Normalization layer and an output layer with dimensions as per the tensor infos below.
1130  //
1131  // This test verifies that we can still read back these old style
1132  // models without the normalization epsilon value.
1133  const std::vector<uint8_t> l2NormalizationModel =
1134  {
1135  0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00,
1136  0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
1137  0x3C, 0x01, 0x00, 0x00, 0x74, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
1138  0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0xE8, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B,
1139  0x04, 0x00, 0x00, 0x00, 0xD6, 0xFE, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x08, 0x00,
1140  0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x9E, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00,
1141  0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00,
1142  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1143  0x4C, 0xFF, 0xFF, 0xFF, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x44, 0xFF, 0xFF, 0xFF, 0x00, 0x00,
1144  0x00, 0x20, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00,
1145  0x20, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x06, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00,
1146  0x0E, 0x00, 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x14, 0x00, 0x0E, 0x00, 0x00, 0x00,
1147  0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x1F, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x20, 0x00,
1148  0x00, 0x00, 0x0F, 0x00, 0x00, 0x00, 0x6C, 0x32, 0x4E, 0x6F, 0x72, 0x6D, 0x61, 0x6C, 0x69, 0x7A, 0x61, 0x74,
1149  0x69, 0x6F, 0x6E, 0x00, 0x01, 0x00, 0x00, 0x00, 0x48, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00,
1150  0x00, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
1151  0x52, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00,
1152  0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00,
1153  0x08, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1154  0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09,
1155  0x04, 0x00, 0x00, 0x00, 0xF6, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x0A, 0x00,
1156  0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x14, 0x00, 0x00, 0x00,
1157  0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00,
1158  0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1159  0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0A, 0x00, 0x00, 0x00,
1160  0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x08, 0x00,
1161  0x07, 0x00, 0x0C, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
1162  0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
1163  0x05, 0x00, 0x00, 0x00, 0x00
1164  };
1165 
1166  armnn::INetworkPtr deserializedNetwork =
1167  DeserializeNetwork(std::string(l2NormalizationModel.begin(), l2NormalizationModel.end()));
1168  CHECK(deserializedNetwork);
1169 
1170  const std::string layerName("l2Normalization");
1171  const armnn::TensorInfo inputInfo = armnn::TensorInfo({1, 2, 1, 5}, armnn::DataType::Float32);
1172 
1175  // Since this variable does not exist in the l2NormalizationModel dump, the default value will be loaded
1176  desc.m_Eps = 1e-12f;
1177 
1179  layerName, {inputInfo}, {inputInfo}, desc);
1180  deserializedNetwork->ExecuteStrategy(verifier);
1181 }
1182 
1183 TEST_CASE("SerializeLogicalBinary")
1184 {
1185  const std::string layerName("logicalBinaryAnd");
1186 
1187  const armnn::TensorShape shape{2, 1, 2, 2};
1188 
1191 
1193 
1195  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
1196  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
1197  armnn::IConnectableLayer* const logicalBinaryLayer = network->AddLogicalBinaryLayer(descriptor, layerName.c_str());
1198  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1199 
1200  inputLayer0->GetOutputSlot(0).Connect(logicalBinaryLayer->GetInputSlot(0));
1201  inputLayer1->GetOutputSlot(0).Connect(logicalBinaryLayer->GetInputSlot(1));
1202  logicalBinaryLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1203 
1204  inputLayer0->GetOutputSlot(0).SetTensorInfo(inputInfo);
1205  inputLayer1->GetOutputSlot(0).SetTensorInfo(inputInfo);
1206  logicalBinaryLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1207 
1208  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1209  CHECK(deserializedNetwork);
1210 
1212  layerName, { inputInfo, inputInfo }, { outputInfo }, descriptor);
1213  deserializedNetwork->ExecuteStrategy(verifier);
1214 }
1215 
1216 TEST_CASE("SerializeLogSoftmax")
1217 {
1218  const std::string layerName("log_softmax");
1220 
1221  armnn::LogSoftmaxDescriptor descriptor;
1222  descriptor.m_Beta = 1.0f;
1223  descriptor.m_Axis = -1;
1224 
1226  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1227  armnn::IConnectableLayer* const logSoftmaxLayer = network->AddLogSoftmaxLayer(descriptor, layerName.c_str());
1228  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1229 
1230  inputLayer->GetOutputSlot(0).Connect(logSoftmaxLayer->GetInputSlot(0));
1231  logSoftmaxLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1232 
1233  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
1234  logSoftmaxLayer->GetOutputSlot(0).SetTensorInfo(info);
1235 
1236  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1237  CHECK(deserializedNetwork);
1238 
1239  LayerVerifierBaseWithDescriptor<armnn::LogSoftmaxDescriptor> verifier(layerName, {info}, {info}, descriptor);
1240  deserializedNetwork->ExecuteStrategy(verifier);
1241 }
1242 
1243 TEST_CASE("SerializeMaximum")
1244 {
1245  const std::string layerName("maximum");
1246  const armnn::TensorInfo info({ 1, 2, 2, 3 }, armnn::DataType::Float32);
1247 
1249  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
1250  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
1251  armnn::IConnectableLayer* const maximumLayer = network->AddMaximumLayer(layerName.c_str());
1252  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1253 
1254  inputLayer0->GetOutputSlot(0).Connect(maximumLayer->GetInputSlot(0));
1255  inputLayer1->GetOutputSlot(0).Connect(maximumLayer->GetInputSlot(1));
1256  maximumLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1257 
1258  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
1259  inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
1260  maximumLayer->GetOutputSlot(0).SetTensorInfo(info);
1261 
1262  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1263  CHECK(deserializedNetwork);
1264 
1265  LayerVerifierBase verifier(layerName, {info, info}, {info});
1266  deserializedNetwork->ExecuteStrategy(verifier);
1267 }
1268 
1269 TEST_CASE("SerializeMean")
1270 {
1271  const std::string layerName("mean");
1272  const armnn::TensorInfo inputInfo({1, 1, 3, 2}, armnn::DataType::Float32);
1273  const armnn::TensorInfo outputInfo({1, 1, 1, 2}, armnn::DataType::Float32);
1274 
1275  armnn::MeanDescriptor descriptor;
1276  descriptor.m_Axis = { 2 };
1277  descriptor.m_KeepDims = true;
1278 
1280  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1281  armnn::IConnectableLayer* const meanLayer = network->AddMeanLayer(descriptor, layerName.c_str());
1282  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1283 
1284  inputLayer->GetOutputSlot(0).Connect(meanLayer->GetInputSlot(0));
1285  meanLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1286 
1287  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
1288  meanLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1289 
1290  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1291  CHECK(deserializedNetwork);
1292 
1293  LayerVerifierBaseWithDescriptor<armnn::MeanDescriptor> verifier(layerName, {inputInfo}, {outputInfo}, descriptor);
1294  deserializedNetwork->ExecuteStrategy(verifier);
1295 }
1296 
1297 TEST_CASE("SerializeMerge")
1298 {
1299  const std::string layerName("merge");
1300  const armnn::TensorInfo info({ 1, 2, 2, 3 }, armnn::DataType::Float32);
1301 
1303  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
1304  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
1305  armnn::IConnectableLayer* const mergeLayer = network->AddMergeLayer(layerName.c_str());
1306  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1307 
1308  inputLayer0->GetOutputSlot(0).Connect(mergeLayer->GetInputSlot(0));
1309  inputLayer1->GetOutputSlot(0).Connect(mergeLayer->GetInputSlot(1));
1310  mergeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1311 
1312  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
1313  inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
1314  mergeLayer->GetOutputSlot(0).SetTensorInfo(info);
1315 
1316  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1317  CHECK(deserializedNetwork);
1318 
1319  LayerVerifierBase verifier(layerName, {info, info}, {info});
1320  deserializedNetwork->ExecuteStrategy(verifier);
1321 }
1322 
1323 class MergerLayerVerifier : public LayerVerifierBaseWithDescriptor<armnn::OriginsDescriptor>
1324 {
1325 public:
1326  MergerLayerVerifier(const std::string& layerName,
1327  const std::vector<armnn::TensorInfo>& inputInfos,
1328  const std::vector<armnn::TensorInfo>& outputInfos,
1329  const armnn::OriginsDescriptor& descriptor)
1330  : LayerVerifierBaseWithDescriptor<armnn::OriginsDescriptor>(layerName, inputInfos, outputInfos, descriptor) {}
1331 
1332  void ExecuteStrategy(const armnn::IConnectableLayer* layer,
1333  const armnn::BaseDescriptor& descriptor,
1334  const std::vector<armnn::ConstTensor>& constants,
1335  const char* name,
1336  const armnn::LayerBindingId id = 0) override
1337  {
1338  armnn::IgnoreUnused(descriptor, constants, id);
1339  switch (layer->GetType())
1340  {
1341  case armnn::LayerType::Input: break;
1342  case armnn::LayerType::Output: break;
1344  {
1345  throw armnn::Exception("MergerLayer should have translated to ConcatLayer");
1346  break;
1347  }
1349  {
1350  VerifyNameAndConnections(layer, name);
1351  const armnn::MergerDescriptor& layerDescriptor =
1352  static_cast<const armnn::MergerDescriptor&>(descriptor);
1353  VerifyDescriptor(layerDescriptor);
1354  break;
1355  }
1356  default:
1357  {
1358  throw armnn::Exception("Unexpected layer type in Merge test model");
1359  }
1360  }
1361  }
1362 };
1363 
1364 // NOTE: Until the deprecated AddMergerLayer disappears this test checks that calling
1365 // AddMergerLayer places a ConcatLayer into the serialized format and that
1366 // when this deserialises we have a ConcatLayer
1367 TEST_CASE("SerializeMerger")
1368 {
1369  const std::string layerName("merger");
1370  const armnn::TensorInfo inputInfo = armnn::TensorInfo({2, 3, 2, 2}, armnn::DataType::Float32);
1371  const armnn::TensorInfo outputInfo = armnn::TensorInfo({4, 3, 2, 2}, armnn::DataType::Float32);
1372 
1373  const std::vector<armnn::TensorShape> shapes({inputInfo.GetShape(), inputInfo.GetShape()});
1374 
1375  armnn::OriginsDescriptor descriptor =
1376  armnn::CreateDescriptorForConcatenation(shapes.begin(), shapes.end(), 0);
1377 
1379  armnn::IConnectableLayer* const inputLayerOne = network->AddInputLayer(0);
1380  armnn::IConnectableLayer* const inputLayerTwo = network->AddInputLayer(1);
1382  armnn::IConnectableLayer* const mergerLayer = network->AddMergerLayer(descriptor, layerName.c_str());
1384  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1385 
1386  inputLayerOne->GetOutputSlot(0).Connect(mergerLayer->GetInputSlot(0));
1387  inputLayerTwo->GetOutputSlot(0).Connect(mergerLayer->GetInputSlot(1));
1388  mergerLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1389 
1390  inputLayerOne->GetOutputSlot(0).SetTensorInfo(inputInfo);
1391  inputLayerTwo->GetOutputSlot(0).SetTensorInfo(inputInfo);
1392  mergerLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1393 
1394  std::string mergerLayerNetwork = SerializeNetwork(*network);
1395  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(mergerLayerNetwork);
1396  CHECK(deserializedNetwork);
1397 
1398  MergerLayerVerifier verifier(layerName, {inputInfo, inputInfo}, {outputInfo}, descriptor);
1399  deserializedNetwork->ExecuteStrategy(verifier);
1400 }
1401 
TEST_CASE("EnsureMergerLayerBackwardCompatibility")
{
    // The hex data below is a flat buffer containing a simple network with two inputs
    // a merger layer (now deprecated) and an output layer with dimensions as per the tensor infos below.
    //
    // This test verifies that we can still read back these old style
    // models replacing the MergerLayers with ConcatLayers with the same parameters.
    const std::vector<uint8_t> mergerModel =
    {
        0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00,
        0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
        0x38, 0x02, 0x00, 0x00, 0x8C, 0x01, 0x00, 0x00, 0x70, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x02, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
        0xF4, 0xFD, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B, 0x04, 0x00, 0x00, 0x00, 0x92, 0xFE, 0xFF, 0xFF, 0x04, 0x00,
        0x00, 0x00, 0x9A, 0xFE, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x7E, 0xFE, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00,
        0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0xF8, 0xFE, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x48, 0xFE, 0xFF, 0xFF, 0x00, 0x00,
        0x00, 0x1F, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00,
        0x68, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00,
        0x0C, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
        0x02, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x22, 0xFF, 0xFF, 0xFF, 0x04, 0x00,
        0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x3E, 0xFF, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x36, 0xFF, 0xFF, 0xFF,
        0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x1E, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x1C, 0x00,
        0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x6D, 0x65, 0x72, 0x67, 0x65, 0x72, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
        0x5C, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x34, 0xFF,
        0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x92, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x02, 0x00,
        0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x08, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00,
        0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x00, 0x00,
        0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0E, 0x00,
        0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x06, 0x00, 0x08, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0E, 0x00,
        0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x0E, 0x00, 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x14, 0x00, 0x0E, 0x00, 0x00, 0x00,
        0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
        0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00,
        0x00, 0x00, 0x66, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x04, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00,
        0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09,
        0x04, 0x00, 0x00, 0x00, 0xF6, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x0A, 0x00,
        0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x14, 0x00, 0x00, 0x00,
        0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00,
        0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0A, 0x00, 0x00, 0x00,
        0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x08, 0x00,
        0x07, 0x00, 0x0C, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
        0x02, 0x00, 0x00, 0x00
    };

    armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(std::string(mergerModel.begin(), mergerModel.end()));
    CHECK(deserializedNetwork);

    const armnn::TensorInfo inputInfo = armnn::TensorInfo({ 2, 3, 2, 2 }, armnn::DataType::Float32);
    const armnn::TensorInfo outputInfo = armnn::TensorInfo({ 4, 3, 2, 2 }, armnn::DataType::Float32);

    const std::vector<armnn::TensorShape> shapes({inputInfo.GetShape(), inputInfo.GetShape()});

    // Rebuild the descriptor the old model was serialized with, so the
    // verifier can compare it against the deserialized Concat layer.
    armnn::OriginsDescriptor descriptor =
        armnn::CreateDescriptorForConcatenation(shapes.begin(), shapes.end(), 0);

    // MergerLayerVerifier throws if a Merge layer (rather than Concat) is found.
    MergerLayerVerifier verifier("merger", { inputInfo, inputInfo }, { outputInfo }, descriptor);
    deserializedNetwork->ExecuteStrategy(verifier);
}
1470 
1471 TEST_CASE("SerializeConcat")
1472 {
1473  const std::string layerName("concat");
1474  const armnn::TensorInfo inputInfo = armnn::TensorInfo({2, 3, 2, 2}, armnn::DataType::Float32);
1475  const armnn::TensorInfo outputInfo = armnn::TensorInfo({4, 3, 2, 2}, armnn::DataType::Float32);
1476 
1477  const std::vector<armnn::TensorShape> shapes({inputInfo.GetShape(), inputInfo.GetShape()});
1478 
1479  armnn::OriginsDescriptor descriptor =
1480  armnn::CreateDescriptorForConcatenation(shapes.begin(), shapes.end(), 0);
1481 
1483  armnn::IConnectableLayer* const inputLayerOne = network->AddInputLayer(0);
1484  armnn::IConnectableLayer* const inputLayerTwo = network->AddInputLayer(1);
1485  armnn::IConnectableLayer* const concatLayer = network->AddConcatLayer(descriptor, layerName.c_str());
1486  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1487 
1488  inputLayerOne->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(0));
1489  inputLayerTwo->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(1));
1490  concatLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1491 
1492  inputLayerOne->GetOutputSlot(0).SetTensorInfo(inputInfo);
1493  inputLayerTwo->GetOutputSlot(0).SetTensorInfo(inputInfo);
1494  concatLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1495 
1496  std::string concatLayerNetwork = SerializeNetwork(*network);
1497  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(concatLayerNetwork);
1498  CHECK(deserializedNetwork);
1499 
1500  // NOTE: using the MergerLayerVerifier to ensure that it is a concat layer and not a
1501  // merger layer that gets placed into the graph.
1502  MergerLayerVerifier verifier(layerName, {inputInfo, inputInfo}, {outputInfo}, descriptor);
1503  deserializedNetwork->ExecuteStrategy(verifier);
1504 }
1505 
1506 TEST_CASE("SerializeMinimum")
1507 {
1508  const std::string layerName("minimum");
1509  const armnn::TensorInfo info({ 1, 2, 2, 3 }, armnn::DataType::Float32);
1510 
1512  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
1513  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
1514  armnn::IConnectableLayer* const minimumLayer = network->AddMinimumLayer(layerName.c_str());
1515  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1516 
1517  inputLayer0->GetOutputSlot(0).Connect(minimumLayer->GetInputSlot(0));
1518  inputLayer1->GetOutputSlot(0).Connect(minimumLayer->GetInputSlot(1));
1519  minimumLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1520 
1521  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
1522  inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
1523  minimumLayer->GetOutputSlot(0).SetTensorInfo(info);
1524 
1525  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1526  CHECK(deserializedNetwork);
1527 
1528  LayerVerifierBase verifier(layerName, {info, info}, {info});
1529  deserializedNetwork->ExecuteStrategy(verifier);
1530 }
1531 
1532 TEST_CASE("SerializeMultiplication")
1533 {
1534  const std::string layerName("multiplication");
1535  const armnn::TensorInfo info({ 1, 5, 2, 3 }, armnn::DataType::Float32);
1536 
1538  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
1539  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
1540  armnn::IConnectableLayer* const multiplicationLayer = network->AddMultiplicationLayer(layerName.c_str());
1541  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1542 
1543  inputLayer0->GetOutputSlot(0).Connect(multiplicationLayer->GetInputSlot(0));
1544  inputLayer1->GetOutputSlot(0).Connect(multiplicationLayer->GetInputSlot(1));
1545  multiplicationLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1546 
1547  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
1548  inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
1549  multiplicationLayer->GetOutputSlot(0).SetTensorInfo(info);
1550 
1551  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1552  CHECK(deserializedNetwork);
1553 
1554  LayerVerifierBase verifier(layerName, {info, info}, {info});
1555  deserializedNetwork->ExecuteStrategy(verifier);
1556 }
1557 
1558 TEST_CASE("SerializePrelu")
1559 {
1560  const std::string layerName("prelu");
1561 
1562  armnn::TensorInfo inputTensorInfo ({ 4, 1, 2 }, armnn::DataType::Float32);
1563  armnn::TensorInfo alphaTensorInfo ({ 5, 4, 3, 1 }, armnn::DataType::Float32);
1564  armnn::TensorInfo outputTensorInfo({ 5, 4, 3, 2 }, armnn::DataType::Float32);
1565 
1567  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1568  armnn::IConnectableLayer* const alphaLayer = network->AddInputLayer(1);
1569  armnn::IConnectableLayer* const preluLayer = network->AddPreluLayer(layerName.c_str());
1570  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1571 
1572  inputLayer->GetOutputSlot(0).Connect(preluLayer->GetInputSlot(0));
1573  alphaLayer->GetOutputSlot(0).Connect(preluLayer->GetInputSlot(1));
1574  preluLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1575 
1576  inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
1577  alphaLayer->GetOutputSlot(0).SetTensorInfo(alphaTensorInfo);
1578  preluLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1579 
1580  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1581  CHECK(deserializedNetwork);
1582 
1583  LayerVerifierBase verifier(layerName, {inputTensorInfo, alphaTensorInfo}, {outputTensorInfo});
1584  deserializedNetwork->ExecuteStrategy(verifier);
1585 }
1586 
1587 TEST_CASE("SerializeNormalization")
1588 {
1589  const std::string layerName("normalization");
1590  const armnn::TensorInfo info({2, 1, 2, 2}, armnn::DataType::Float32);
1591 
1594  desc.m_NormSize = 3;
1595  desc.m_Alpha = 1;
1596  desc.m_Beta = 1;
1597  desc.m_K = 1;
1598 
1600  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1601  armnn::IConnectableLayer* const normalizationLayer = network->AddNormalizationLayer(desc, layerName.c_str());
1602  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1603 
1604  inputLayer->GetOutputSlot(0).Connect(normalizationLayer->GetInputSlot(0));
1605  normalizationLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1606 
1607  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
1608  normalizationLayer->GetOutputSlot(0).SetTensorInfo(info);
1609 
1610  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1611  CHECK(deserializedNetwork);
1612 
1614  deserializedNetwork->ExecuteStrategy(verifier);
1615 }
1616 
1617 TEST_CASE("SerializePad")
1618 {
1619  const std::string layerName("pad");
1620  const armnn::TensorInfo inputTensorInfo = armnn::TensorInfo({1, 2, 3, 4}, armnn::DataType::Float32);
1621  const armnn::TensorInfo outputTensorInfo = armnn::TensorInfo({1, 3, 5, 7}, armnn::DataType::Float32);
1622 
1623  armnn::PadDescriptor desc({{0, 0}, {1, 0}, {1, 1}, {1, 2}});
1624 
1626  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1627  armnn::IConnectableLayer* const padLayer = network->AddPadLayer(desc, layerName.c_str());
1628  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1629 
1630  inputLayer->GetOutputSlot(0).Connect(padLayer->GetInputSlot(0));
1631  padLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1632 
1633  inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
1634  padLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1635 
1636  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1637  CHECK(deserializedNetwork);
1638 
1640  {inputTensorInfo},
1641  {outputTensorInfo},
1642  desc);
1643  deserializedNetwork->ExecuteStrategy(verifier);
1644 }
1645 
TEST_CASE("EnsurePadBackwardCompatibility")
{
    // The PadDescriptor is being extended with a float PadValue (so a value other than 0
    // can be used to pad the tensor.
    //
    // This test contains a binary representation of a simple input->pad->output network
    // prior to this change to test that the descriptor has been updated in a backward
    // compatible way with respect to Deserialization of older binary dumps
    //
    // The blob below is a FlatBuffers-serialized network captured from an older
    // ArmNN version; it must remain byte-for-byte identical for the test to
    // exercise the legacy format.
    const std::vector<uint8_t> padModel =
    {
        0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00,
        0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
        0x54, 0x01, 0x00, 0x00, 0x6C, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0xD0, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B,
        0x04, 0x00, 0x00, 0x00, 0x96, 0xFF, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x9E, 0xFF, 0xFF, 0xFF, 0x04, 0x00,
        0x00, 0x00, 0x72, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
        0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00,
        0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x2C, 0xFF, 0xFF, 0xFF, 0x01, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x24, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x16, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00,
        0x0E, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x4C, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x06, 0x00, 0x08, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x08, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x0E, 0x00, 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x14, 0x00, 0x0E, 0x00, 0x00, 0x00,
        0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00,
        0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x70, 0x61, 0x64, 0x00, 0x01, 0x00, 0x00, 0x00, 0x48, 0x00, 0x00, 0x00,
        0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00,
        0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x52, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x05, 0x00,
        0x00, 0x00, 0x07, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x04, 0x00, 0x00, 0x00, 0xF6, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x06, 0x00, 0x0A, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x0E, 0x00, 0x14, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x0E, 0x00, 0x00, 0x00,
        0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00,
        0x08, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x0A, 0x00, 0x10, 0x00, 0x08, 0x00, 0x07, 0x00, 0x0C, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
        0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00,
        0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00
    };

    // Deserialize the legacy blob; this must still succeed with the extended descriptor.
    armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(std::string(padModel.begin(), padModel.end()));
    CHECK(deserializedNetwork);

    const armnn::TensorInfo inputInfo = armnn::TensorInfo({ 1, 2, 3, 4 }, armnn::DataType::Float32);
    const armnn::TensorInfo outputInfo = armnn::TensorInfo({ 1, 3, 5, 7 }, armnn::DataType::Float32);

    // The descriptor the legacy model was built with; m_PadValue is expected
    // to take its default (0) on deserialization.
    armnn::PadDescriptor descriptor({{ 0, 0 }, { 1, 0 }, { 1, 1 }, { 1, 2 }});

    LayerVerifierBaseWithDescriptor<armnn::PadDescriptor> verifier("pad", { inputInfo }, { outputInfo }, descriptor);
    deserializedNetwork->ExecuteStrategy(verifier);
}
1699 
1700 TEST_CASE("SerializePermute")
1701 {
1702  const std::string layerName("permute");
1703  const armnn::TensorInfo inputTensorInfo({4, 3, 2, 1}, armnn::DataType::Float32);
1704  const armnn::TensorInfo outputTensorInfo({1, 2, 3, 4}, armnn::DataType::Float32);
1705 
1706  armnn::PermuteDescriptor descriptor(armnn::PermutationVector({3, 2, 1, 0}));
1707 
1709  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1710  armnn::IConnectableLayer* const permuteLayer = network->AddPermuteLayer(descriptor, layerName.c_str());
1711  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1712 
1713  inputLayer->GetOutputSlot(0).Connect(permuteLayer->GetInputSlot(0));
1714  permuteLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1715 
1716  inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
1717  permuteLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1718 
1719  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1720  CHECK(deserializedNetwork);
1721 
1723  layerName, {inputTensorInfo}, {outputTensorInfo}, descriptor);
1724  deserializedNetwork->ExecuteStrategy(verifier);
1725 }
1726 
1727 TEST_CASE("SerializePooling2d")
1728 {
1729  const std::string layerName("pooling2d");
1730  const armnn::TensorInfo inputInfo({1, 2, 2, 1}, armnn::DataType::Float32);
1731  const armnn::TensorInfo outputInfo({1, 1, 1, 1}, armnn::DataType::Float32);
1732 
1735  desc.m_PadTop = 0;
1736  desc.m_PadBottom = 0;
1737  desc.m_PadLeft = 0;
1738  desc.m_PadRight = 0;
1739  desc.m_PoolType = armnn::PoolingAlgorithm::Average;
1740  desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
1741  desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
1742  desc.m_PoolHeight = 2;
1743  desc.m_PoolWidth = 2;
1744  desc.m_StrideX = 2;
1745  desc.m_StrideY = 2;
1746 
1748  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1749  armnn::IConnectableLayer* const pooling2dLayer = network->AddPooling2dLayer(desc, layerName.c_str());
1750  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1751 
1752  inputLayer->GetOutputSlot(0).Connect(pooling2dLayer->GetInputSlot(0));
1753  pooling2dLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1754 
1755  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
1756  pooling2dLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1757 
1758  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1759  CHECK(deserializedNetwork);
1760 
1762  layerName, {inputInfo}, {outputInfo}, desc);
1763  deserializedNetwork->ExecuteStrategy(verifier);
1764 }
1765 
1766 TEST_CASE("SerializeQuantize")
1767 {
1768  const std::string layerName("quantize");
1769  const armnn::TensorInfo info({ 1, 2, 2, 3 }, armnn::DataType::Float32);
1770 
1772  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1773  armnn::IConnectableLayer* const quantizeLayer = network->AddQuantizeLayer(layerName.c_str());
1774  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1775 
1776  inputLayer->GetOutputSlot(0).Connect(quantizeLayer->GetInputSlot(0));
1777  quantizeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1778 
1779  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
1780  quantizeLayer->GetOutputSlot(0).SetTensorInfo(info);
1781 
1782  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1783  CHECK(deserializedNetwork);
1784 
1785  LayerVerifierBase verifier(layerName, {info}, {info});
1786  deserializedNetwork->ExecuteStrategy(verifier);
1787 }
1788 
1789 TEST_CASE("SerializeRank")
1790 {
1791  const std::string layerName("rank");
1792  const armnn::TensorInfo inputInfo({1, 9}, armnn::DataType::Float32);
1793  const armnn::TensorInfo outputInfo({1}, armnn::DataType::Signed32);
1794 
1796  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1797  armnn::IConnectableLayer* const rankLayer = network->AddRankLayer(layerName.c_str());
1798  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1799 
1800  inputLayer->GetOutputSlot(0).Connect(rankLayer->GetInputSlot(0));
1801  rankLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1802 
1803  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
1804  rankLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1805 
1806  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1807  CHECK(deserializedNetwork);
1808 
1809  LayerVerifierBase verifier(layerName, {inputInfo}, {outputInfo});
1810  deserializedNetwork->ExecuteStrategy(verifier);
1811 }
1812 
1813 TEST_CASE("SerializeReduceSum")
1814 {
1815  const std::string layerName("Reduce_Sum");
1816  const armnn::TensorInfo inputInfo({1, 1, 3, 2}, armnn::DataType::Float32);
1817  const armnn::TensorInfo outputInfo({1, 1, 1, 2}, armnn::DataType::Float32);
1818 
1819  armnn::ReduceDescriptor descriptor;
1820  descriptor.m_vAxis = { 2 };
1821  descriptor.m_ReduceOperation = armnn::ReduceOperation::Sum;
1822 
1824  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1825  armnn::IConnectableLayer* const reduceSumLayer = network->AddReduceLayer(descriptor, layerName.c_str());
1826  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1827 
1828  inputLayer->GetOutputSlot(0).Connect(reduceSumLayer->GetInputSlot(0));
1829  reduceSumLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1830 
1831  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
1832  reduceSumLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1833 
1834  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1835  CHECK(deserializedNetwork);
1836 
1837  LayerVerifierBaseWithDescriptor<armnn::ReduceDescriptor> verifier(layerName, {inputInfo}, {outputInfo}, descriptor);
1838  deserializedNetwork->ExecuteStrategy(verifier);
1839 }
1840 
1841 TEST_CASE("SerializeReshape")
1842 {
1843  const std::string layerName("reshape");
1844  const armnn::TensorInfo inputInfo({1, 9}, armnn::DataType::Float32);
1845  const armnn::TensorInfo outputInfo({3, 3}, armnn::DataType::Float32);
1846 
1847  armnn::ReshapeDescriptor descriptor({3, 3});
1848 
1850  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1851  armnn::IConnectableLayer* const reshapeLayer = network->AddReshapeLayer(descriptor, layerName.c_str());
1852  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1853 
1854  inputLayer->GetOutputSlot(0).Connect(reshapeLayer->GetInputSlot(0));
1855  reshapeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1856 
1857  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
1858  reshapeLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1859 
1860  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1861  CHECK(deserializedNetwork);
1862 
1864  layerName, {inputInfo}, {outputInfo}, descriptor);
1865  deserializedNetwork->ExecuteStrategy(verifier);
1866 }
1867 
1868 TEST_CASE("SerializeResize")
1869 {
1870  const std::string layerName("resize");
1871  const armnn::TensorInfo inputInfo = armnn::TensorInfo({1, 3, 5, 5}, armnn::DataType::Float32);
1872  const armnn::TensorInfo outputInfo = armnn::TensorInfo({1, 3, 2, 4}, armnn::DataType::Float32);
1873 
1875  desc.m_TargetWidth = 4;
1876  desc.m_TargetHeight = 2;
1877  desc.m_Method = armnn::ResizeMethod::NearestNeighbor;
1878  desc.m_AlignCorners = true;
1879  desc.m_HalfPixelCenters = true;
1880 
1882  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1883  armnn::IConnectableLayer* const resizeLayer = network->AddResizeLayer(desc, layerName.c_str());
1884  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1885 
1886  inputLayer->GetOutputSlot(0).Connect(resizeLayer->GetInputSlot(0));
1887  resizeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1888 
1889  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
1890  resizeLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1891 
1892  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1893  CHECK(deserializedNetwork);
1894 
1895  LayerVerifierBaseWithDescriptor<armnn::ResizeDescriptor> verifier(layerName, {inputInfo}, {outputInfo}, desc);
1896  deserializedNetwork->ExecuteStrategy(verifier);
1897 }
1898 
1899 class ResizeBilinearLayerVerifier : public LayerVerifierBaseWithDescriptor<armnn::ResizeBilinearDescriptor>
1900 {
1901 public:
1902  ResizeBilinearLayerVerifier(const std::string& layerName,
1903  const std::vector<armnn::TensorInfo>& inputInfos,
1904  const std::vector<armnn::TensorInfo>& outputInfos,
1905  const armnn::ResizeBilinearDescriptor& descriptor)
1907  layerName, inputInfos, outputInfos, descriptor) {}
1908 
1909  void ExecuteStrategy(const armnn::IConnectableLayer* layer,
1910  const armnn::BaseDescriptor& descriptor,
1911  const std::vector<armnn::ConstTensor>& constants,
1912  const char* name,
1913  const armnn::LayerBindingId id = 0) override
1914  {
1915  armnn::IgnoreUnused(descriptor, constants, id);
1916  switch (layer->GetType())
1917  {
1918  case armnn::LayerType::Input: break;
1919  case armnn::LayerType::Output: break;
1921  {
1922  VerifyNameAndConnections(layer, name);
1923  const armnn::ResizeDescriptor& layerDescriptor =
1924  static_cast<const armnn::ResizeDescriptor&>(descriptor);
1925  CHECK(layerDescriptor.m_Method == armnn::ResizeMethod::Bilinear);
1926  CHECK(layerDescriptor.m_TargetWidth == m_Descriptor.m_TargetWidth);
1927  CHECK(layerDescriptor.m_TargetHeight == m_Descriptor.m_TargetHeight);
1928  CHECK(layerDescriptor.m_DataLayout == m_Descriptor.m_DataLayout);
1929  CHECK(layerDescriptor.m_AlignCorners == m_Descriptor.m_AlignCorners);
1930  CHECK(layerDescriptor.m_HalfPixelCenters == m_Descriptor.m_HalfPixelCenters);
1931  break;
1932  }
1933  default:
1934  {
1935  throw armnn::Exception("Unexpected layer type in test model. ResizeBiliniar "
1936  "should have translated to Resize");
1937  }
1938  }
1939  }
1940 };
1941 
1942 // NOTE: Until the deprecated AddResizeBilinearLayer disappears this test checks that
1943 // calling AddResizeBilinearLayer places a ResizeLayer into the serialized format
1944 // and that when this deserialises we have a ResizeLayer
1945 TEST_CASE("SerializeResizeBilinear")
1946 {
1947  const std::string layerName("resizeBilinear");
1948  const armnn::TensorInfo inputInfo = armnn::TensorInfo({1, 3, 5, 5}, armnn::DataType::Float32);
1949  const armnn::TensorInfo outputInfo = armnn::TensorInfo({1, 3, 2, 4}, armnn::DataType::Float32);
1950 
1952  desc.m_TargetWidth = 4u;
1953  desc.m_TargetHeight = 2u;
1954  desc.m_AlignCorners = true;
1955  desc.m_HalfPixelCenters = true;
1956 
1958  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1960  armnn::IConnectableLayer* const resizeLayer = network->AddResizeBilinearLayer(desc, layerName.c_str());
1962  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1963 
1964  inputLayer->GetOutputSlot(0).Connect(resizeLayer->GetInputSlot(0));
1965  resizeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1966 
1967  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
1968  resizeLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1969 
1970  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1971  CHECK(deserializedNetwork);
1972 
1973  ResizeBilinearLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, desc);
1974  deserializedNetwork->ExecuteStrategy(verifier);
1975 }
1976 
1977 TEST_CASE("EnsureResizeBilinearBackwardCompatibility")
1978 {
1979  // The hex data below is a flat buffer containing a simple network with an input,
1980  // a ResizeBilinearLayer (now deprecated) and an output
1981  //
1982  // This test verifies that we can still deserialize this old-style model by replacing
1983  // the ResizeBilinearLayer with an equivalent ResizeLayer
1984  const std::vector<uint8_t> resizeBilinearModel =
1985  {
1986  0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00,
1987  0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
1988  0x50, 0x01, 0x00, 0x00, 0x74, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
1989  0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0xD4, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B,
1990  0x04, 0x00, 0x00, 0x00, 0xC2, 0xFE, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x08, 0x00,
1991  0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x8A, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00,
1992  0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00,
1993  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1994  0x38, 0xFF, 0xFF, 0xFF, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x30, 0xFF, 0xFF, 0xFF, 0x00, 0x00,
1995  0x00, 0x1A, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0E, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00,
1996  0x34, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x12, 0x00, 0x08, 0x00, 0x0C, 0x00,
1997  0x07, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
1998  0x00, 0x00, 0x0E, 0x00, 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x14, 0x00, 0x0E, 0x00,
1999  0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x19, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00,
2000  0x20, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x72, 0x65, 0x73, 0x69, 0x7A, 0x65, 0x42, 0x69, 0x6C, 0x69,
2001  0x6E, 0x65, 0x61, 0x72, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x48, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
2002  0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00,
2003  0x00, 0x00, 0x52, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
2004  0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x04, 0x00,
2005  0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
2006  0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
2007  0x00, 0x09, 0x04, 0x00, 0x00, 0x00, 0xF6, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00,
2008  0x0A, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x14, 0x00,
2009  0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,
2010  0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
2011  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0A, 0x00,
2012  0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00,
2013  0x08, 0x00, 0x07, 0x00, 0x0C, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00,
2014  0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x05, 0x00,
2015  0x00, 0x00, 0x05, 0x00, 0x00, 0x00
2016  };
2017 
2018  armnn::INetworkPtr deserializedNetwork =
2019  DeserializeNetwork(std::string(resizeBilinearModel.begin(), resizeBilinearModel.end()));
2020  CHECK(deserializedNetwork);
2021 
2022  const armnn::TensorInfo inputInfo = armnn::TensorInfo({1, 3, 5, 5}, armnn::DataType::Float32);
2023  const armnn::TensorInfo outputInfo = armnn::TensorInfo({1, 3, 2, 4}, armnn::DataType::Float32);
2024 
2026  descriptor.m_TargetWidth = 4u;
2027  descriptor.m_TargetHeight = 2u;
2028 
2029  ResizeBilinearLayerVerifier verifier("resizeBilinear", { inputInfo }, { outputInfo }, descriptor);
2030  deserializedNetwork->ExecuteStrategy(verifier);
2031 }
2032 
2033 TEST_CASE("SerializeShape")
2034 {
2035  const std::string layerName("shape");
2036  const armnn::TensorInfo inputInfo({1, 3, 3, 1}, armnn::DataType::Signed32);
2037  const armnn::TensorInfo outputInfo({ 4 }, armnn::DataType::Signed32);
2038 
2040  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2041  armnn::IConnectableLayer* const shapeLayer = network->AddShapeLayer(layerName.c_str());
2042  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2043 
2044  inputLayer->GetOutputSlot(0).Connect(shapeLayer->GetInputSlot(0));
2045  shapeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2046 
2047  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2048  shapeLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2049 
2050  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2051  CHECK(deserializedNetwork);
2052 
2053  LayerVerifierBase verifier(layerName, {inputInfo}, {outputInfo});
2054 
2055  deserializedNetwork->ExecuteStrategy(verifier);
2056 }
2057 
2058 TEST_CASE("SerializeSlice")
2059 {
2060  const std::string layerName{"slice"};
2061 
2062  const armnn::TensorInfo inputInfo = armnn::TensorInfo({3, 2, 3, 1}, armnn::DataType::Float32);
2063  const armnn::TensorInfo outputInfo = armnn::TensorInfo({2, 2, 2, 1}, armnn::DataType::Float32);
2064 
2065  armnn::SliceDescriptor descriptor({ 0, 0, 1, 0}, {2, 2, 2, 1});
2066 
2068 
2069  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2070  armnn::IConnectableLayer* const sliceLayer = network->AddSliceLayer(descriptor, layerName.c_str());
2071  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2072 
2073  inputLayer->GetOutputSlot(0).Connect(sliceLayer->GetInputSlot(0));
2074  sliceLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2075 
2076  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2077  sliceLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2078 
2079  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2080  CHECK(deserializedNetwork);
2081 
2082  LayerVerifierBaseWithDescriptor<armnn::SliceDescriptor> verifier(layerName, {inputInfo}, {outputInfo}, descriptor);
2083  deserializedNetwork->ExecuteStrategy(verifier);
2084 }
2085 
2086 TEST_CASE("SerializeSoftmax")
2087 {
2088  const std::string layerName("softmax");
2090 
2091  armnn::SoftmaxDescriptor descriptor;
2092  descriptor.m_Beta = 1.0f;
2093 
2095  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2096  armnn::IConnectableLayer* const softmaxLayer = network->AddSoftmaxLayer(descriptor, layerName.c_str());
2097  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2098 
2099  inputLayer->GetOutputSlot(0).Connect(softmaxLayer->GetInputSlot(0));
2100  softmaxLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2101 
2102  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
2103  softmaxLayer->GetOutputSlot(0).SetTensorInfo(info);
2104 
2105  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2106  CHECK(deserializedNetwork);
2107 
2108  LayerVerifierBaseWithDescriptor<armnn::SoftmaxDescriptor> verifier(layerName, {info}, {info}, descriptor);
2109  deserializedNetwork->ExecuteStrategy(verifier);
2110 }
2111 
2112 TEST_CASE("SerializeSpaceToBatchNd")
2113 {
2114  const std::string layerName("spaceToBatchNd");
2115  const armnn::TensorInfo inputInfo({2, 1, 2, 4}, armnn::DataType::Float32);
2116  const armnn::TensorInfo outputInfo({8, 1, 1, 3}, armnn::DataType::Float32);
2117 
2120  desc.m_BlockShape = {2, 2};
2121  desc.m_PadList = {{0, 0}, {2, 0}};
2122 
2124  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2125  armnn::IConnectableLayer* const spaceToBatchNdLayer = network->AddSpaceToBatchNdLayer(desc, layerName.c_str());
2126  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2127 
2128  inputLayer->GetOutputSlot(0).Connect(spaceToBatchNdLayer->GetInputSlot(0));
2129  spaceToBatchNdLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2130 
2131  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2132  spaceToBatchNdLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2133 
2134  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2135  CHECK(deserializedNetwork);
2136 
2138  layerName, {inputInfo}, {outputInfo}, desc);
2139  deserializedNetwork->ExecuteStrategy(verifier);
2140 }
2141 
2142 TEST_CASE("SerializeSpaceToDepth")
2143 {
2144  const std::string layerName("spaceToDepth");
2145 
2146  const armnn::TensorInfo inputInfo ({ 1, 16, 8, 3 }, armnn::DataType::Float32);
2147  const armnn::TensorInfo outputInfo({ 1, 8, 4, 12 }, armnn::DataType::Float32);
2148 
2150  desc.m_BlockSize = 2;
2151  desc.m_DataLayout = armnn::DataLayout::NHWC;
2152 
2154  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2155  armnn::IConnectableLayer* const spaceToDepthLayer = network->AddSpaceToDepthLayer(desc, layerName.c_str());
2156  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2157 
2158  inputLayer->GetOutputSlot(0).Connect(spaceToDepthLayer->GetInputSlot(0));
2159  spaceToDepthLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2160 
2161  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2162  spaceToDepthLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2163 
2164  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2165  CHECK(deserializedNetwork);
2166 
2168  layerName, {inputInfo}, {outputInfo}, desc);
2169  deserializedNetwork->ExecuteStrategy(verifier);
2170 }
2171 
2172 TEST_CASE("SerializeSplitter")
2173 {
2174  const unsigned int numViews = 3;
2175  const unsigned int numDimensions = 4;
2176  const unsigned int inputShape[] = {1, 18, 4, 4};
2177  const unsigned int outputShape[] = {1, 6, 4, 4};
2178 
2179  // This is modelled on how the caffe parser sets up a splitter layer to partition an input along dimension one.
2180  unsigned int splitterDimSizes[4] = {static_cast<unsigned int>(inputShape[0]),
2181  static_cast<unsigned int>(inputShape[1]),
2182  static_cast<unsigned int>(inputShape[2]),
2183  static_cast<unsigned int>(inputShape[3])};
2184  splitterDimSizes[1] /= numViews;
2185  armnn::ViewsDescriptor desc(numViews, numDimensions);
2186 
2187  for (unsigned int g = 0; g < numViews; ++g)
2188  {
2189  desc.SetViewOriginCoord(g, 1, splitterDimSizes[1] * g);
2190 
2191  for (unsigned int dimIdx=0; dimIdx < 4; dimIdx++)
2192  {
2193  desc.SetViewSize(g, dimIdx, splitterDimSizes[dimIdx]);
2194  }
2195  }
2196 
2197  const std::string layerName("splitter");
2198  const armnn::TensorInfo inputInfo(numDimensions, inputShape, armnn::DataType::Float32);
2199  const armnn::TensorInfo outputInfo(numDimensions, outputShape, armnn::DataType::Float32);
2200 
2202  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2203  armnn::IConnectableLayer* const splitterLayer = network->AddSplitterLayer(desc, layerName.c_str());
2204  armnn::IConnectableLayer* const outputLayer0 = network->AddOutputLayer(0);
2205  armnn::IConnectableLayer* const outputLayer1 = network->AddOutputLayer(1);
2206  armnn::IConnectableLayer* const outputLayer2 = network->AddOutputLayer(2);
2207 
2208  inputLayer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0));
2209  splitterLayer->GetOutputSlot(0).Connect(outputLayer0->GetInputSlot(0));
2210  splitterLayer->GetOutputSlot(1).Connect(outputLayer1->GetInputSlot(0));
2211  splitterLayer->GetOutputSlot(2).Connect(outputLayer2->GetInputSlot(0));
2212 
2213  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2214  splitterLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2215  splitterLayer->GetOutputSlot(1).SetTensorInfo(outputInfo);
2216  splitterLayer->GetOutputSlot(2).SetTensorInfo(outputInfo);
2217 
2218  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2219  CHECK(deserializedNetwork);
2220 
2222  layerName, {inputInfo}, {outputInfo, outputInfo, outputInfo}, desc);
2223  deserializedNetwork->ExecuteStrategy(verifier);
2224 }
2225 
2226 TEST_CASE("SerializeStack")
2227 {
2228  const std::string layerName("stack");
2229 
2230  armnn::TensorInfo inputTensorInfo ({4, 3, 5}, armnn::DataType::Float32);
2231  armnn::TensorInfo outputTensorInfo({4, 3, 2, 5}, armnn::DataType::Float32);
2232 
2233  armnn::StackDescriptor descriptor(2, 2, {4, 3, 5});
2234 
2236  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(0);
2237  armnn::IConnectableLayer* const inputLayer2 = network->AddInputLayer(1);
2238  armnn::IConnectableLayer* const stackLayer = network->AddStackLayer(descriptor, layerName.c_str());
2239  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2240 
2241  inputLayer1->GetOutputSlot(0).Connect(stackLayer->GetInputSlot(0));
2242  inputLayer2->GetOutputSlot(0).Connect(stackLayer->GetInputSlot(1));
2243  stackLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2244 
2245  inputLayer1->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
2246  inputLayer2->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
2247  stackLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2248 
2249  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2250  CHECK(deserializedNetwork);
2251 
2253  layerName, {inputTensorInfo, inputTensorInfo}, {outputTensorInfo}, descriptor);
2254  deserializedNetwork->ExecuteStrategy(verifier);
2255 }
2256 
2257 TEST_CASE("SerializeStandIn")
2258 {
2259  const std::string layerName("standIn");
2260 
2261  armnn::TensorInfo tensorInfo({ 1u }, armnn::DataType::Float32);
2262  armnn::StandInDescriptor descriptor(2u, 2u);
2263 
2265  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
2266  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
2267  armnn::IConnectableLayer* const standInLayer = network->AddStandInLayer(descriptor, layerName.c_str());
2268  armnn::IConnectableLayer* const outputLayer0 = network->AddOutputLayer(0);
2269  armnn::IConnectableLayer* const outputLayer1 = network->AddOutputLayer(1);
2270 
2271  inputLayer0->GetOutputSlot(0).Connect(standInLayer->GetInputSlot(0));
2272  inputLayer0->GetOutputSlot(0).SetTensorInfo(tensorInfo);
2273 
2274  inputLayer1->GetOutputSlot(0).Connect(standInLayer->GetInputSlot(1));
2275  inputLayer1->GetOutputSlot(0).SetTensorInfo(tensorInfo);
2276 
2277  standInLayer->GetOutputSlot(0).Connect(outputLayer0->GetInputSlot(0));
2278  standInLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
2279 
2280  standInLayer->GetOutputSlot(1).Connect(outputLayer1->GetInputSlot(0));
2281  standInLayer->GetOutputSlot(1).SetTensorInfo(tensorInfo);
2282 
2283  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2284  CHECK(deserializedNetwork);
2285 
2287  layerName, { tensorInfo, tensorInfo }, { tensorInfo, tensorInfo }, descriptor);
2288  deserializedNetwork->ExecuteStrategy(verifier);
2289 }
2290 
2291 TEST_CASE("SerializeStridedSlice")
2292 {
2293  const std::string layerName("stridedSlice");
2294  const armnn::TensorInfo inputInfo = armnn::TensorInfo({3, 2, 3, 1}, armnn::DataType::Float32);
2295  const armnn::TensorInfo outputInfo = armnn::TensorInfo({3, 1}, armnn::DataType::Float32);
2296 
2297  armnn::StridedSliceDescriptor desc({0, 0, 1, 0}, {1, 1, 1, 1}, {1, 1, 1, 1});
2298  desc.m_EndMask = (1 << 4) - 1;
2299  desc.m_ShrinkAxisMask = (1 << 1) | (1 << 2);
2300  desc.m_DataLayout = armnn::DataLayout::NCHW;
2301 
2303  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2304  armnn::IConnectableLayer* const stridedSliceLayer = network->AddStridedSliceLayer(desc, layerName.c_str());
2305  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2306 
2307  inputLayer->GetOutputSlot(0).Connect(stridedSliceLayer->GetInputSlot(0));
2308  stridedSliceLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2309 
2310  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2311  stridedSliceLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2312 
2313  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2314  CHECK(deserializedNetwork);
2315 
2317  layerName, {inputInfo}, {outputInfo}, desc);
2318  deserializedNetwork->ExecuteStrategy(verifier);
2319 }
2320 
2321 TEST_CASE("SerializeSubtraction")
2322 {
2323  const std::string layerName("subtraction");
2325 
2327  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
2328  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
2329  armnn::IConnectableLayer* const subtractionLayer = network->AddSubtractionLayer(layerName.c_str());
2330  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2331 
2332  inputLayer0->GetOutputSlot(0).Connect(subtractionLayer->GetInputSlot(0));
2333  inputLayer1->GetOutputSlot(0).Connect(subtractionLayer->GetInputSlot(1));
2334  subtractionLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2335 
2336  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
2337  inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
2338  subtractionLayer->GetOutputSlot(0).SetTensorInfo(info);
2339 
2340  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2341  CHECK(deserializedNetwork);
2342 
2343  LayerVerifierBase verifier(layerName, {info, info}, {info});
2344  deserializedNetwork->ExecuteStrategy(verifier);
2345 }
2346 
2347 TEST_CASE("SerializeSwitch")
2348 {
2349  class SwitchLayerVerifier : public LayerVerifierBase
2350  {
2351  public:
2352  SwitchLayerVerifier(const std::string& layerName,
2353  const std::vector<armnn::TensorInfo>& inputInfos,
2354  const std::vector<armnn::TensorInfo>& outputInfos)
2355  : LayerVerifierBase(layerName, inputInfos, outputInfos) {}
2356 
2357  void ExecuteStrategy(const armnn::IConnectableLayer* layer,
2358  const armnn::BaseDescriptor& descriptor,
2359  const std::vector<armnn::ConstTensor>& constants,
2360  const char* name,
2361  const armnn::LayerBindingId id = 0) override
2362  {
2363  armnn::IgnoreUnused(descriptor, constants, id);
2364  switch (layer->GetType())
2365  {
2366  case armnn::LayerType::Input: break;
2367  case armnn::LayerType::Output: break;
2368  case armnn::LayerType::Constant: break;
2370  {
2371  VerifyNameAndConnections(layer, name);
2372  break;
2373  }
2374  default:
2375  {
2376  throw armnn::Exception("Unexpected layer type in Switch test model");
2377  }
2378  }
2379  }
2380  };
2381 
2382  const std::string layerName("switch");
2384 
2385  std::vector<float> constantData = GenerateRandomData<float>(info.GetNumElements());
2386  armnn::ConstTensor constTensor(info, constantData);
2387 
2389  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2390  armnn::IConnectableLayer* const constantLayer = network->AddConstantLayer(constTensor, "constant");
2391  armnn::IConnectableLayer* const switchLayer = network->AddSwitchLayer(layerName.c_str());
2392  armnn::IConnectableLayer* const trueOutputLayer = network->AddOutputLayer(0);
2393  armnn::IConnectableLayer* const falseOutputLayer = network->AddOutputLayer(1);
2394 
2395  inputLayer->GetOutputSlot(0).Connect(switchLayer->GetInputSlot(0));
2396  constantLayer->GetOutputSlot(0).Connect(switchLayer->GetInputSlot(1));
2397  switchLayer->GetOutputSlot(0).Connect(trueOutputLayer->GetInputSlot(0));
2398  switchLayer->GetOutputSlot(1).Connect(falseOutputLayer->GetInputSlot(0));
2399 
2400  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
2401  constantLayer->GetOutputSlot(0).SetTensorInfo(info);
2402  switchLayer->GetOutputSlot(0).SetTensorInfo(info);
2403  switchLayer->GetOutputSlot(1).SetTensorInfo(info);
2404 
2405  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2406  CHECK(deserializedNetwork);
2407 
2408  SwitchLayerVerifier verifier(layerName, {info, info}, {info, info});
2409  deserializedNetwork->ExecuteStrategy(verifier);
2410 }
2411 
2412 TEST_CASE("SerializeTranspose")
2413 {
2414  const std::string layerName("transpose");
2415  const armnn::TensorInfo inputTensorInfo({4, 3, 2, 1}, armnn::DataType::Float32);
2416  const armnn::TensorInfo outputTensorInfo({1, 2, 3, 4}, armnn::DataType::Float32);
2417 
2418  armnn::TransposeDescriptor descriptor(armnn::PermutationVector({3, 2, 1, 0}));
2419 
2421  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2422  armnn::IConnectableLayer* const transposeLayer = network->AddTransposeLayer(descriptor, layerName.c_str());
2423  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2424 
2425  inputLayer->GetOutputSlot(0).Connect(transposeLayer->GetInputSlot(0));
2426  transposeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2427 
2428  inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
2429  transposeLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2430 
2431  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2432  CHECK(deserializedNetwork);
2433 
2435  layerName, {inputTensorInfo}, {outputTensorInfo}, descriptor);
2436  deserializedNetwork->ExecuteStrategy(verifier);
2437 }
2438 
2439 TEST_CASE("SerializeTransposeConvolution2d")
2440 {
2441  const std::string layerName("transposeConvolution2d");
2442  const armnn::TensorInfo inputInfo ({ 1, 7, 7, 1 }, armnn::DataType::Float32);
2443  const armnn::TensorInfo outputInfo({ 1, 9, 9, 1 }, armnn::DataType::Float32);
2444 
2445  const armnn::TensorInfo weightsInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32);
2446  const armnn::TensorInfo biasesInfo ({ 1 }, armnn::DataType::Float32);
2447 
2448  std::vector<float> weightsData = GenerateRandomData<float>(weightsInfo.GetNumElements());
2449  armnn::ConstTensor weights(weightsInfo, weightsData);
2450 
2451  std::vector<float> biasesData = GenerateRandomData<float>(biasesInfo.GetNumElements());
2452  armnn::ConstTensor biases(biasesInfo, biasesData);
2453 
2455  descriptor.m_PadLeft = 1;
2456  descriptor.m_PadRight = 1;
2457  descriptor.m_PadTop = 1;
2458  descriptor.m_PadBottom = 1;
2459  descriptor.m_StrideX = 1;
2460  descriptor.m_StrideY = 1;
2461  descriptor.m_BiasEnabled = true;
2463 
2465  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2466  armnn::IConnectableLayer* const convLayer =
2467  network->AddTransposeConvolution2dLayer(descriptor,
2468  weights,
2470  layerName.c_str());
2471  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2472 
2473  inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
2474  convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2475 
2476  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2477  convLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2478 
2479  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2480  CHECK(deserializedNetwork);
2481 
2482  const std::vector<armnn::ConstTensor> constants {weights, biases};
2484  layerName, {inputInfo}, {outputInfo}, descriptor, constants);
2485  deserializedNetwork->ExecuteStrategy(verifier);
2486 }
2487 
2488 TEST_CASE("SerializeDeserializeNonLinearNetwork")
2489 {
2490  class ConstantLayerVerifier : public LayerVerifierBase
2491  {
2492  public:
2493  ConstantLayerVerifier(const std::string& layerName,
2494  const std::vector<armnn::TensorInfo>& inputInfos,
2495  const std::vector<armnn::TensorInfo>& outputInfos,
2496  const armnn::ConstTensor& layerInput)
2497  : LayerVerifierBase(layerName, inputInfos, outputInfos)
2498  , m_LayerInput(layerInput) {}
2499 
2500  void ExecuteStrategy(const armnn::IConnectableLayer* layer,
2501  const armnn::BaseDescriptor& descriptor,
2502  const std::vector<armnn::ConstTensor>& constants,
2503  const char* name,
2504  const armnn::LayerBindingId id = 0) override
2505  {
2506  armnn::IgnoreUnused(descriptor, constants, id);
2507  switch (layer->GetType())
2508  {
2509  case armnn::LayerType::Input: break;
2510  case armnn::LayerType::Output: break;
2511  case armnn::LayerType::Addition: break;
2513  {
2514  VerifyNameAndConnections(layer, name);
2515  CompareConstTensor(constants.at(0), m_LayerInput);
2516  break;
2517  }
2518  default:
2519  {
2520  throw armnn::Exception("Unexpected layer type in test model");
2521  }
2522  }
2523  }
2524 
2525  private:
2526  armnn::ConstTensor m_LayerInput;
2527  };
2528 
2529  const std::string layerName("constant");
2531 
2532  std::vector<float> constantData = GenerateRandomData<float>(info.GetNumElements());
2533  armnn::ConstTensor constTensor(info, constantData);
2534 
2536  armnn::IConnectableLayer* input = network->AddInputLayer(0);
2537  armnn::IConnectableLayer* add = network->AddAdditionLayer();
2538  armnn::IConnectableLayer* constant = network->AddConstantLayer(constTensor, layerName.c_str());
2539  armnn::IConnectableLayer* output = network->AddOutputLayer(0);
2540 
2541  input->GetOutputSlot(0).Connect(add->GetInputSlot(0));
2542  constant->GetOutputSlot(0).Connect(add->GetInputSlot(1));
2543  add->GetOutputSlot(0).Connect(output->GetInputSlot(0));
2544 
2545  input->GetOutputSlot(0).SetTensorInfo(info);
2546  constant->GetOutputSlot(0).SetTensorInfo(info);
2547  add->GetOutputSlot(0).SetTensorInfo(info);
2548 
2549  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2550  CHECK(deserializedNetwork);
2551 
2552  ConstantLayerVerifier verifier(layerName, {}, {info}, constTensor);
2553  deserializedNetwork->ExecuteStrategy(verifier);
2554 }
2555 
2556 }
uint32_t m_PadBottom
Padding bottom value in the height dimension.
bool m_BiasEnabled
Enable/disable bias.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
A ViewsDescriptor for the SplitterLayer.
Interface for a layer that is connectable to other layers via InputSlots and OutputSlots.
Definition: INetwork.hpp:61
float m_ScaleW
Center size encoding scale weight.
bool m_BiasEnabled
Enable/disable bias.
A TransposeConvolution2dDescriptor for the TransposeConvolution2dLayer.
const TensorShape & GetShape() const
Definition: Tensor.hpp:191
uint32_t m_PadBottom
Padding bottom value in the height dimension.
A ReshapeDescriptor for the ReshapeLayer.
armnn::INetworkPtr DeserializeNetwork(const std::string &serializerString)
#define ARMNN_NO_DEPRECATE_WARN_BEGIN
Definition: Deprecated.hpp:33
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
A ComparisonDescriptor for the ComparisonLayer.
Definition: Descriptors.hpp:78
float m_ScaleX
Center size encoding scale x.
uint32_t m_TargetWidth
Target width value.
bool m_TransposeWeightMatrix
Enable/disable transpose weight matrix.
A Convolution2dDescriptor for the Convolution2dLayer.
uint32_t m_PadLeft
Padding left value in the width dimension.
bool m_BiasEnabled
Enable/disable bias.
ResizeMethod m_Method
The Interpolation method to use (Bilinear, NearestNeighbor).
float m_Gamma
Gamma, the scale scalar value applied for the normalized tensor. Defaults to 1.0. ...
float m_Beta
Exponentiation value.
The padding fields don't count and are ignored.
float m_Eps
Value to add to the variance. Used to avoid dividing by zero.
ArgMinMaxFunction m_Function
Specify if the function is to find Min or Max.
Definition: Descriptors.hpp:70
uint32_t m_DetectionsPerClass
Detections per classes, used in Regular NMS.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
A LogicalBinaryDescriptor for the LogicalBinaryLayer.
uint32_t m_PadRight
Padding right value in the width dimension.
void ExecuteStrategy(const armnn::IConnectableLayer *layer, const armnn::BaseDescriptor &descriptor, const std::vector< armnn::ConstTensor > &constants, const char *name, const armnn::LayerBindingId id=0) override
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
Copyright (c) 2021 ARM Limited and Contributors.
void IgnoreUnused(Ts &&...)
uint32_t m_PadBottom
Padding bottom value in the height dimension.
uint32_t m_DilationY
Dilation along y axis.
int32_t m_EndMask
End mask value.
A SpaceToDepthDescriptor for the SpaceToDepthLayer.
uint32_t m_DilationY
Dilation factor value for height dimension.
A BatchToSpaceNdDescriptor for the BatchToSpaceNdLayer.
int LayerBindingId
Type of identifiers for bindable layers (inputs, outputs).
Definition: Types.hpp:244
virtual void SetTensorInfo(const TensorInfo &tensorInfo)=0
A ResizeDescriptor for the ResizeLayer.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
uint32_t m_MaxClassesPerDetection
Maximum numbers of classes per detection, used in Fast NMS.
Base class for all descriptors.
Definition: Descriptors.hpp:22
std::vector< unsigned int > m_Axis
Values for the dimensions to reduce.
A StackDescriptor for the StackLayer.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
constexpr char const * GetUnaryOperationAsCString(UnaryOperation operation)
Definition: TypesUtils.hpp:71
uint32_t m_PadTop
Padding top value in the height dimension.
uint32_t m_MaxDetections
Maximum numbers of detections.
A PadDescriptor for the PadLayer.
uint32_t m_StrideX
Stride value when proceeding through input for the width dimension.
uint32_t m_StrideX
Stride value when proceeding through input for the width dimension.
DataType
Definition: Types.hpp:35
float m_NmsIouThreshold
Intersection over union threshold.
#define ARMNN_NO_DEPRECATE_WARN_END
Definition: Deprecated.hpp:34
uint32_t m_DilationX
Dilation factor value for width dimension.
uint32_t m_PadTop
Padding top value in the height dimension.
Status SetViewSize(uint32_t view, uint32_t coord, uint32_t value)
Set the size of the views.
void ExecuteStrategy(const armnn::IConnectableLayer *layer, const armnn::BaseDescriptor &descriptor, const std::vector< armnn::ConstTensor > &constants, const char *name, const armnn::LayerBindingId id=0) override
A L2NormalizationDescriptor for the L2NormalizationLayer.
void VerifyNameAndConnections(const armnn::IConnectableLayer *layer, const char *name)
An ArgMinMaxDescriptor for ArgMinMaxLayer.
Definition: Descriptors.hpp:56
An OriginsDescriptor for the ConcatLayer.
A ReduceDescriptor for the REDUCE operators.
A FullyConnectedDescriptor for the FullyConnectedLayer.
bool m_BiasEnabled
Enable/disable bias.
A tensor defined by a TensorInfo (shape and data type) and an immutable backing store.
Definition: Tensor.hpp:327
uint32_t m_TargetWidth
Target width value.
A GatherDescriptor for the GatherLayer.
uint32_t m_NumClasses
Number of classes.
bool m_HalfPixelCenters
Half Pixel Centers.
uint32_t m_PadTop
Padding top value in the height dimension.
A StandInDescriptor for the StandIn layer.
LayerVerifierBase(const std::string &layerName, const std::vector< armnn::TensorInfo > &inputInfos, const std::vector< armnn::TensorInfo > &outputInfos)
bool m_UseRegularNms
Use Regular NMS.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
uint32_t m_TargetHeight
Target height value.
A SliceDescriptor for the SliceLayer.
uint32_t m_StrideY
Stride value when proceeding through input for the height dimension.
virtual LayerType GetType() const =0
Returns the armnn::LayerType of this layer.
unsigned int m_BlockSize
Scalar specifying the input block size. It must be >= 1.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
std::vector< uint32_t > m_vAxis
The indices of the dimensions to reduce.
float m_ScaleH
Center size encoding scale height.
A SpaceToBatchNdDescriptor for the SpaceToBatchNdLayer.
uint32_t m_DilationX
Dilation along x axis.
uint32_t m_PadLeft
Padding left value in the width dimension.
EmptyOptional is used to initialize the Optional class in case we want to have default value for an O...
Definition: Optional.hpp:32
bool m_AlignCorners
Aligned corners.
uint32_t m_StrideX
Stride value when proceeding through input for the width dimension.
int32_t m_Axis
The axis in params to gather indices from.
A ElementwiseUnaryDescriptor for the ElementwiseUnaryLayer.
Definition: Descriptors.hpp:98
uint32_t m_StrideY
Stride value when proceeding through input for the height dimension.
uint32_t m_StrideY
Stride value when proceeding through input for the height dimension.
TEST_SUITE("SerializerTests")
Base class for all ArmNN exceptions so that users can filter to just those.
Definition: Exceptions.hpp:46
void CompareConstTensor(const armnn::ConstTensor &tensor1, const armnn::ConstTensor &tensor2)
virtual const IInputSlot & GetInputSlot(unsigned int index) const =0
Get a const input slot handle by slot index.
A MeanDescriptor for the MeanLayer.
UnaryOperation
Definition: Types.hpp:104
uint32_t m_PadRight
Padding right value in the width dimension.
A TransposeDescriptor for the TransposeLayer.
A StridedSliceDescriptor for the StridedSliceLayer.
virtual const IOutputSlot & GetOutputSlot(unsigned int index) const =0
Get the const output slot handle by slot index.
float m_ScaleY
Center size encoding scale y.
OriginsDescriptor CreateDescriptorForConcatenation(TensorShapeIt first, TensorShapeIt last, unsigned int concatenationDimension)
Convenience template to create an OriginsDescriptor to use when creating a ConcatLayer for performing...
float m_NmsScoreThreshold
NMS score threshold.
std::unique_ptr< INetwork, void(*)(INetwork *network)> INetworkPtr
Definition: INetwork.hpp:172
virtual int Connect(IInputSlot &destination)=0
A Pooling2dDescriptor for the Pooling2dLayer.
std::string SerializeNetwork(const armnn::INetwork &network)
A NormalizationDescriptor for the NormalizationLayer.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
An InstanceNormalizationDescriptor for InstanceNormalizationLayer.
A ResizeBilinearDescriptor for the ResizeBilinearLayer.
static INetworkPtr Create(NetworkOptions networkOptions={})
Definition: Network.cpp:530
A SoftmaxDescriptor for the SoftmaxLayer.
Status SetViewOriginCoord(uint32_t view, uint32_t coord, uint32_t value)
Set the view origin coordinates.
A DepthwiseConvolution2dDescriptor for the DepthwiseConvolution2dLayer.
A FillDescriptor for the FillLayer.
A BatchNormalizationDescriptor for the BatchNormalizationLayer.
uint32_t m_PadLeft
Padding left value in the width dimension.
void ExecuteStrategy(const armnn::IConnectableLayer *layer, const armnn::BaseDescriptor &descriptor, const std::vector< armnn::ConstTensor > &constants, const char *name, const armnn::LayerBindingId id=0) override
A PermuteDescriptor for the PermuteLayer.
uint32_t m_PadRight
Padding right value in the width dimension.
bool m_ConstantWeights
Enable/disable constant weights and biases.