ArmNN
 22.08
SerializerTests.cpp
Go to the documentation of this file.
1 //
2 // Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
3 // SPDX-License-Identifier: MIT
4 //
5 
#include "../Serializer.hpp"
#include "SerializerTestUtils.hpp"

#include <armnn/Descriptors.hpp>
#include <armnn/INetwork.hpp>
#include <armnn/TypesUtils.hpp>
#include <armnn/LstmParams.hpp>
#include <armnn/utility/IgnoreUnused.hpp>

#include <random>
#include <vector>

#include <doctest/doctest.h>
23 
24 TEST_SUITE("SerializerTests")
25 {
26 
27 TEST_CASE("SerializeAddition")
28 {
29  const std::string layerName("addition");
30  const armnn::TensorInfo tensorInfo({1, 2, 3}, armnn::DataType::Float32);
31 
33  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
34  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
35  armnn::IConnectableLayer* const additionLayer = network->AddAdditionLayer(layerName.c_str());
36  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
37 
38  inputLayer0->GetOutputSlot(0).Connect(additionLayer->GetInputSlot(0));
39  inputLayer1->GetOutputSlot(0).Connect(additionLayer->GetInputSlot(1));
40  additionLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
41 
42  inputLayer0->GetOutputSlot(0).SetTensorInfo(tensorInfo);
43  inputLayer1->GetOutputSlot(0).SetTensorInfo(tensorInfo);
44  additionLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
45 
46  std::string serializedNetwork = SerializeNetwork(*network);
47  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(serializedNetwork);
48  CHECK(deserializedNetwork);
49 
50  LayerVerifierBase verifier(layerName, {tensorInfo, tensorInfo}, {tensorInfo});
51  deserializedNetwork->ExecuteStrategy(verifier);
52 }
53 
54 void SerializeArgMinMaxTest(armnn::DataType dataType)
55 {
56  const std::string layerName("argminmax");
57  const armnn::TensorInfo inputInfo({1, 2, 3}, armnn::DataType::Float32);
58  const armnn::TensorInfo outputInfo({1, 3}, dataType);
59 
60  armnn::ArgMinMaxDescriptor descriptor;
62  descriptor.m_Axis = 1;
63 
65  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
66  armnn::IConnectableLayer* const argMinMaxLayer = network->AddArgMinMaxLayer(descriptor, layerName.c_str());
67  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
68 
69  inputLayer->GetOutputSlot(0).Connect(argMinMaxLayer->GetInputSlot(0));
70  argMinMaxLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
71 
72  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
73  argMinMaxLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
74 
75  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
76  CHECK(deserializedNetwork);
77 
79  {inputInfo},
80  {outputInfo},
81  descriptor);
82  deserializedNetwork->ExecuteStrategy(verifier);
83 }
84 
85 TEST_CASE("SerializeArgMinMaxSigned32")
86 {
87  SerializeArgMinMaxTest(armnn::DataType::Signed32);
88 }
89 
90 TEST_CASE("SerializeArgMinMaxSigned64")
91 {
92  SerializeArgMinMaxTest(armnn::DataType::Signed64);
93 }
94 
95 TEST_CASE("SerializeBatchNormalization")
96 {
97  const std::string layerName("batchNormalization");
98  const armnn::TensorInfo inputInfo ({ 1, 3, 3, 1 }, armnn::DataType::Float32);
99  const armnn::TensorInfo outputInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32);
100 
101  const armnn::TensorInfo meanInfo({1}, armnn::DataType::Float32, 0.0f, 0, true);
102  const armnn::TensorInfo varianceInfo({1}, armnn::DataType::Float32, 0.0f, 0, true);
103  const armnn::TensorInfo betaInfo({1}, armnn::DataType::Float32, 0.0f, 0, true);
104  const armnn::TensorInfo gammaInfo({1}, armnn::DataType::Float32, 0.0f, 0, true);
105 
107  descriptor.m_Eps = 0.0010000000475f;
108  descriptor.m_DataLayout = armnn::DataLayout::NHWC;
109 
110  std::vector<float> meanData({5.0});
111  std::vector<float> varianceData({2.0});
112  std::vector<float> betaData({1.0});
113  std::vector<float> gammaData({0.0});
114 
115  std::vector<armnn::ConstTensor> constants;
116  constants.emplace_back(armnn::ConstTensor(meanInfo, meanData));
117  constants.emplace_back(armnn::ConstTensor(varianceInfo, varianceData));
118  constants.emplace_back(armnn::ConstTensor(betaInfo, betaData));
119  constants.emplace_back(armnn::ConstTensor(gammaInfo, gammaData));
120 
122  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
123  armnn::IConnectableLayer* const batchNormalizationLayer =
124  network->AddBatchNormalizationLayer(descriptor,
125  constants[0],
126  constants[1],
127  constants[2],
128  constants[3],
129  layerName.c_str());
130  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
131 
132  inputLayer->GetOutputSlot(0).Connect(batchNormalizationLayer->GetInputSlot(0));
133  batchNormalizationLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
134 
135  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
136  batchNormalizationLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
137 
138  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
139  CHECK(deserializedNetwork);
140 
142  layerName, {inputInfo}, {outputInfo}, descriptor, constants);
143  deserializedNetwork->ExecuteStrategy(verifier);
144 }
145 
146 TEST_CASE("SerializeBatchToSpaceNd")
147 {
148  const std::string layerName("spaceToBatchNd");
149  const armnn::TensorInfo inputInfo({4, 1, 2, 2}, armnn::DataType::Float32);
150  const armnn::TensorInfo outputInfo({1, 1, 4, 4}, armnn::DataType::Float32);
151 
154  desc.m_BlockShape = {2, 2};
155  desc.m_Crops = {{0, 0}, {0, 0}};
156 
158  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
159  armnn::IConnectableLayer* const batchToSpaceNdLayer = network->AddBatchToSpaceNdLayer(desc, layerName.c_str());
160  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
161 
162  inputLayer->GetOutputSlot(0).Connect(batchToSpaceNdLayer->GetInputSlot(0));
163  batchToSpaceNdLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
164 
165  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
166  batchToSpaceNdLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
167 
168  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
169  CHECK(deserializedNetwork);
170 
172  {inputInfo},
173  {outputInfo},
174  desc);
175  deserializedNetwork->ExecuteStrategy(verifier);
176 }
177 
178 TEST_CASE("SerializeCast")
179 {
180  const std::string layerName("cast");
181 
182  const armnn::TensorShape shape{1, 5, 2, 3};
183 
186 
188  armnn::IConnectableLayer* inputLayer = network->AddInputLayer(0);
189  armnn::IConnectableLayer* castLayer = network->AddCastLayer(layerName.c_str());
190  armnn::IConnectableLayer* outputLayer = network->AddOutputLayer(0);
191 
192  inputLayer->GetOutputSlot(0).Connect(castLayer->GetInputSlot(0));
193  castLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
194 
195  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
196  castLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
197 
198  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
199  CHECK(deserializedNetwork);
200 
201  LayerVerifierBase verifier(layerName, {inputInfo}, {outputInfo});
202  deserializedNetwork->ExecuteStrategy(verifier);
203 }
204 
205 TEST_CASE("SerializeChannelShuffle")
206 {
207  const std::string layerName("channelShuffle");
208  const armnn::TensorInfo inputInfo({1, 9}, armnn::DataType::Float32);
209  const armnn::TensorInfo outputInfo({1, 9}, armnn::DataType::Float32);
210 
211  armnn::ChannelShuffleDescriptor descriptor({3, 1});
212 
214  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
215  armnn::IConnectableLayer* const ChannelShuffleLayer =
216  network->AddChannelShuffleLayer(descriptor, layerName.c_str());
217  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
218 
219  inputLayer->GetOutputSlot(0).Connect(ChannelShuffleLayer->GetInputSlot(0));
220  ChannelShuffleLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
221 
222  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
223  ChannelShuffleLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
224 
225  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
226  CHECK(deserializedNetwork);
227 
229  layerName, {inputInfo}, {outputInfo}, descriptor);
230  deserializedNetwork->ExecuteStrategy(verifier);
231 }
232 
233 TEST_CASE("SerializeComparison")
234 {
235  const std::string layerName("comparison");
236 
237  const armnn::TensorShape shape{2, 1, 2, 4};
238 
241 
243 
245  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
246  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
247  armnn::IConnectableLayer* const comparisonLayer = network->AddComparisonLayer(descriptor, layerName.c_str());
248  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
249 
250  inputLayer0->GetOutputSlot(0).Connect(comparisonLayer->GetInputSlot(0));
251  inputLayer1->GetOutputSlot(0).Connect(comparisonLayer->GetInputSlot(1));
252  comparisonLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
253 
254  inputLayer0->GetOutputSlot(0).SetTensorInfo(inputInfo);
255  inputLayer1->GetOutputSlot(0).SetTensorInfo(inputInfo);
256  comparisonLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
257 
258  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
259  CHECK(deserializedNetwork);
260 
262  { inputInfo, inputInfo },
263  { outputInfo },
264  descriptor);
265  deserializedNetwork->ExecuteStrategy(verifier);
266 }
267 
268 TEST_CASE("SerializeConstant")
269 {
270  class ConstantLayerVerifier : public LayerVerifierBase
271  {
272  public:
273  ConstantLayerVerifier(const std::string& layerName,
274  const std::vector<armnn::TensorInfo>& inputInfos,
275  const std::vector<armnn::TensorInfo>& outputInfos,
276  const std::vector<armnn::ConstTensor>& constants)
277  : LayerVerifierBase(layerName, inputInfos, outputInfos)
278  , m_Constants(constants) {}
279 
280  void ExecuteStrategy(const armnn::IConnectableLayer* layer,
281  const armnn::BaseDescriptor& descriptor,
282  const std::vector<armnn::ConstTensor>& constants,
283  const char* name,
284  const armnn::LayerBindingId id = 0) override
285  {
286  armnn::IgnoreUnused(descriptor, id);
287 
288  switch (layer->GetType())
289  {
290  case armnn::LayerType::Input: break;
291  case armnn::LayerType::Output: break;
292  case armnn::LayerType::Addition: break;
293  default:
294  {
295  this->VerifyNameAndConnections(layer, name);
296 
297  for (std::size_t i = 0; i < constants.size(); i++)
298  {
299  CompareConstTensor(constants[i], m_Constants[i]);
300  }
301  }
302  }
303  }
304 
305  private:
306  const std::vector<armnn::ConstTensor> m_Constants;
307  };
308 
309  const std::string layerName("constant");
310  const armnn::TensorInfo info({ 2, 3 }, armnn::DataType::Float32, 0.0f, 0, true);
311 
312  std::vector<float> constantData = GenerateRandomData<float>(info.GetNumElements());
313  armnn::ConstTensor constTensor(info, constantData);
314 
316  armnn::IConnectableLayer* input = network->AddInputLayer(0);
317  armnn::IConnectableLayer* constant = network->AddConstantLayer(constTensor, layerName.c_str());
318  armnn::IConnectableLayer* add = network->AddAdditionLayer();
319  armnn::IConnectableLayer* output = network->AddOutputLayer(0);
320 
321  input->GetOutputSlot(0).Connect(add->GetInputSlot(0));
322  constant->GetOutputSlot(0).Connect(add->GetInputSlot(1));
323  add->GetOutputSlot(0).Connect(output->GetInputSlot(0));
324 
325  input->GetOutputSlot(0).SetTensorInfo(info);
326  constant->GetOutputSlot(0).SetTensorInfo(info);
327  add->GetOutputSlot(0).SetTensorInfo(info);
328 
329  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
330  CHECK(deserializedNetwork);
331 
332  ConstantLayerVerifier verifier(layerName, {}, {info}, {constTensor});
333  deserializedNetwork->ExecuteStrategy(verifier);
334 }
335 
using Convolution2dDescriptor = armnn::Convolution2dDescriptor;

/// Verifier shared by the Convolution2d serialization tests: checks the
/// deserialized layer's name/connections and that the descriptor's
/// m_BiasEnabled flag survived the round trip.
class Convolution2dLayerVerifier : public LayerVerifierBaseWithDescriptor<Convolution2dDescriptor>
{
public:
    Convolution2dLayerVerifier(const std::string& layerName,
                               const std::vector<armnn::TensorInfo>& inputInfos,
                               const std::vector<armnn::TensorInfo>& outputInfos,
                               const Convolution2dDescriptor& descriptor)
        : LayerVerifierBaseWithDescriptor<Convolution2dDescriptor>(layerName, inputInfos, outputInfos, descriptor) {}

    void ExecuteStrategy(const armnn::IConnectableLayer* layer,
                         const armnn::BaseDescriptor& descriptor,
                         const std::vector<armnn::ConstTensor>& constants,
                         const char* name,
                         const armnn::LayerBindingId id = 0) override
    {
        armnn::IgnoreUnused(constants, id);
        switch (layer->GetType())
        {
            // Surrounding Input/Output/Constant layers are not the subject
            // of the test; only the convolution layer is verified.
            case armnn::LayerType::Input: break;
            case armnn::LayerType::Output: break;
            case armnn::LayerType::Constant: break;
            default:
            {
                VerifyNameAndConnections(layer, name);
                // Downcast is safe here: the non-skipped layer in these
                // tests is the convolution carrying this descriptor type.
                const Convolution2dDescriptor& layerDescriptor =
                        static_cast<const Convolution2dDescriptor&>(descriptor);
                CHECK(layerDescriptor.m_BiasEnabled == m_Descriptor.m_BiasEnabled);
            }
        }
    }
};
368 
369 TEST_CASE("SerializeConvolution2d")
370 {
371  const std::string layerName("convolution2d");
372  const armnn::TensorInfo inputInfo ({ 1, 5, 5, 1 }, armnn::DataType::Float32);
373  const armnn::TensorInfo outputInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32);
374 
375  const armnn::TensorInfo weightsInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32, 0.0f, 0, true);
376  const armnn::TensorInfo biasesInfo ({ 1 }, armnn::DataType::Float32, 0.0f, 0, true);
377 
378  std::vector<float> weightsData = GenerateRandomData<float>(weightsInfo.GetNumElements());
379  armnn::ConstTensor weights(weightsInfo, weightsData);
380 
381  std::vector<float> biasesData = GenerateRandomData<float>(biasesInfo.GetNumElements());
382  armnn::ConstTensor biases(biasesInfo, biasesData);
383 
385  descriptor.m_PadLeft = 1;
386  descriptor.m_PadRight = 1;
387  descriptor.m_PadTop = 1;
388  descriptor.m_PadBottom = 1;
389  descriptor.m_StrideX = 2;
390  descriptor.m_StrideY = 2;
391  descriptor.m_DilationX = 2;
392  descriptor.m_DilationY = 2;
393  descriptor.m_BiasEnabled = true;
395 
397  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
399  armnn::IConnectableLayer* const convLayer =
400  network->AddConvolution2dLayer(descriptor,
401  weights,
403  layerName.c_str());
405  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
406 
407  inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
408  convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
409 
410  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
411  convLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
412 
413  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
414  CHECK(deserializedNetwork);
415 
416  Convolution2dLayerVerifier verifier(layerName, {inputInfo, weightsInfo, biasesInfo}, {outputInfo}, descriptor);
417  deserializedNetwork->ExecuteStrategy(verifier);
418 }
419 
420 TEST_CASE("SerializeConvolution2dWithPerAxisParamsTestDeprecatedMethod")
421 {
422  using namespace armnn;
423 
424  const std::string layerName("convolution2dWithPerAxis");
425  const TensorInfo inputInfo ({ 1, 3, 1, 2 }, DataType::QAsymmU8, 0.55f, 128);
426  const TensorInfo outputInfo({ 1, 3, 1, 3 }, DataType::QAsymmU8, 0.75f, 128);
427 
428  const std::vector<float> quantScales{ 0.75f, 0.65f, 0.85f };
429  constexpr unsigned int quantDimension = 0;
430 
431  const TensorInfo kernelInfo({ 3, 1, 1, 2 }, DataType::QSymmS8, quantScales, quantDimension, true);
432 
433  const std::vector<float> biasQuantScales{ 0.25f, 0.50f, 0.75f };
434  const TensorInfo biasInfo({ 3 }, DataType::Signed32, biasQuantScales, quantDimension, true);
435 
436  std::vector<int8_t> kernelData = GenerateRandomData<int8_t>(kernelInfo.GetNumElements());
437  armnn::ConstTensor weights(kernelInfo, kernelData);
438  std::vector<int32_t> biasData = GenerateRandomData<int32_t>(biasInfo.GetNumElements());
439  armnn::ConstTensor biases(biasInfo, biasData);
440 
441  Convolution2dDescriptor descriptor;
442  descriptor.m_StrideX = 1;
443  descriptor.m_StrideY = 1;
444  descriptor.m_PadLeft = 0;
445  descriptor.m_PadRight = 0;
446  descriptor.m_PadTop = 0;
447  descriptor.m_PadBottom = 0;
448  descriptor.m_BiasEnabled = true;
449  descriptor.m_DataLayout = armnn::DataLayout::NHWC;
450 
452  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
454  armnn::IConnectableLayer* const convLayer =
455  network->AddConvolution2dLayer(descriptor,
456  weights,
458  layerName.c_str());
460  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
461 
462  inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
463  convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
464 
465  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
466  convLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
467 
468  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
469  CHECK(deserializedNetwork);
470 
471  Convolution2dLayerVerifier verifier(layerName, {inputInfo, kernelInfo, biasInfo}, {outputInfo}, descriptor);
472 
473  deserializedNetwork->ExecuteStrategy(verifier);
474 }
475 
476 TEST_CASE("SerializeConvolution2dWeightsAndBiasesAsConstantLayers")
477 {
478  const std::string layerName("convolution2d");
479  const armnn::TensorInfo inputInfo ({ 1, 5, 5, 1 }, armnn::DataType::Float32);
480  const armnn::TensorInfo outputInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32);
481 
482  const armnn::TensorInfo weightsInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32, 0.0f, 0, true);
483  const armnn::TensorInfo biasesInfo ({ 1 }, armnn::DataType::Float32, 0.0f, 0, true);
484 
485  std::vector<float> weightsData = GenerateRandomData<float>(weightsInfo.GetNumElements());
486  armnn::ConstTensor weights(weightsInfo, weightsData);
487 
488  std::vector<float> biasesData = GenerateRandomData<float>(biasesInfo.GetNumElements());
489  armnn::ConstTensor biases(biasesInfo, biasesData);
490 
492  descriptor.m_PadLeft = 1;
493  descriptor.m_PadRight = 1;
494  descriptor.m_PadTop = 1;
495  descriptor.m_PadBottom = 1;
496  descriptor.m_StrideX = 2;
497  descriptor.m_StrideY = 2;
498  descriptor.m_DilationX = 2;
499  descriptor.m_DilationY = 2;
500  descriptor.m_BiasEnabled = true;
502 
504  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
505  armnn::IConnectableLayer* const weightsLayer = network->AddConstantLayer(weights, "Weights");
506  armnn::IConnectableLayer* const biasesLayer = network->AddConstantLayer(biases, "Biases");
507  armnn::IConnectableLayer* const convLayer = network->AddConvolution2dLayer(descriptor,
508  layerName.c_str());
509  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
510 
511  inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
512  weightsLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(1));
513  biasesLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(2));
514  convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
515 
516  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
517  weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsInfo);
518  biasesLayer->GetOutputSlot(0).SetTensorInfo(biasesInfo);
519  convLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
520 
521  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
522  CHECK(deserializedNetwork);
523 
524  const std::vector<armnn::ConstTensor>& constants {weights, biases};
526  layerName, {inputInfo, weightsInfo, biasesInfo}, {outputInfo}, descriptor, constants);
527 
528  deserializedNetwork->ExecuteStrategy(verifier);
529 }
530 
531 TEST_CASE("SerializeConvolution3d")
532 {
533  const std::string layerName("convolution3d");
534  const armnn::TensorInfo inputInfo ({ 1, 5, 5, 5, 1 }, armnn::DataType::Float32);
535  const armnn::TensorInfo outputInfo({ 1, 2, 2, 2, 1 }, armnn::DataType::Float32);
536 
537  const armnn::TensorInfo weightsInfo({ 3, 3, 3, 1, 1 }, armnn::DataType::Float32, 0.0f, 0, true);
538  const armnn::TensorInfo biasesInfo ({ 1 }, armnn::DataType::Float32, 0.0f, 0, true);
539 
540  std::vector<float> weightsData = GenerateRandomData<float>(weightsInfo.GetNumElements());
541  armnn::ConstTensor weights(weightsInfo, weightsData);
542 
543  std::vector<float> biasesData = GenerateRandomData<float>(biasesInfo.GetNumElements());
544  armnn::ConstTensor biases(biasesInfo, biasesData);
545 
547  descriptor.m_PadLeft = 0;
548  descriptor.m_PadRight = 0;
549  descriptor.m_PadTop = 0;
550  descriptor.m_PadBottom = 0;
551  descriptor.m_PadFront = 0;
552  descriptor.m_PadBack = 0;
553  descriptor.m_DilationX = 1;
554  descriptor.m_DilationY = 1;
555  descriptor.m_DilationZ = 1;
556  descriptor.m_StrideX = 2;
557  descriptor.m_StrideY = 2;
558  descriptor.m_StrideZ = 2;
559  descriptor.m_BiasEnabled = true;
561 
563  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
564  armnn::IConnectableLayer* const weightsLayer = network->AddConstantLayer(weights, "Weights");
565  armnn::IConnectableLayer* const biasesLayer = network->AddConstantLayer(biases, "Biases");
566  armnn::IConnectableLayer* const convLayer = network->AddConvolution3dLayer(descriptor, layerName.c_str());
567  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
568 
569  inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
570  weightsLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(1));
571  biasesLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(2));
572  convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
573 
574  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
575  weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsInfo);
576  biasesLayer->GetOutputSlot(0).SetTensorInfo(biasesInfo);
577  convLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
578 
579  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
580  CHECK(deserializedNetwork);
581 
583  layerName, {inputInfo, weightsInfo, biasesInfo}, {outputInfo}, descriptor);
584  deserializedNetwork->ExecuteStrategy(verifier);
585 }
586 
587 TEST_CASE("SerializeDepthToSpace")
588 {
589  const std::string layerName("depthToSpace");
590 
591  const armnn::TensorInfo inputInfo ({ 1, 8, 4, 12 }, armnn::DataType::Float32);
592  const armnn::TensorInfo outputInfo({ 1, 16, 8, 3 }, armnn::DataType::Float32);
593 
595  desc.m_BlockSize = 2;
596  desc.m_DataLayout = armnn::DataLayout::NHWC;
597 
599  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
600  armnn::IConnectableLayer* const depthToSpaceLayer = network->AddDepthToSpaceLayer(desc, layerName.c_str());
601  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
602 
603  inputLayer->GetOutputSlot(0).Connect(depthToSpaceLayer->GetInputSlot(0));
604  depthToSpaceLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
605 
606  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
607  depthToSpaceLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
608 
609  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
610  CHECK(deserializedNetwork);
611 
612  LayerVerifierBaseWithDescriptor<armnn::DepthToSpaceDescriptor> verifier(layerName, {inputInfo}, {outputInfo}, desc);
613  deserializedNetwork->ExecuteStrategy(verifier);
614 }
615 
616 TEST_CASE("SerializeDepthwiseConvolution2d")
617 {
618  const std::string layerName("depwiseConvolution2d");
619  const armnn::TensorInfo inputInfo ({ 1, 5, 5, 3 }, armnn::DataType::Float32);
620  const armnn::TensorInfo outputInfo({ 1, 3, 3, 3 }, armnn::DataType::Float32);
621 
622  const armnn::TensorInfo weightsInfo({ 1, 3, 3, 3 }, armnn::DataType::Float32, 0.0f, 0, true);
623  const armnn::TensorInfo biasesInfo ({ 3 }, armnn::DataType::Float32, 0.0f, 0, true);
624 
625  std::vector<float> weightsData = GenerateRandomData<float>(weightsInfo.GetNumElements());
626  armnn::ConstTensor weights(weightsInfo, weightsData);
627 
628  std::vector<int32_t> biasesData = GenerateRandomData<int32_t>(biasesInfo.GetNumElements());
629  armnn::ConstTensor biases(biasesInfo, biasesData);
630 
632  descriptor.m_PadLeft = 1;
633  descriptor.m_PadRight = 1;
634  descriptor.m_PadTop = 1;
635  descriptor.m_PadBottom = 1;
636  descriptor.m_StrideX = 2;
637  descriptor.m_StrideY = 2;
638  descriptor.m_DilationX = 2;
639  descriptor.m_DilationY = 2;
640  descriptor.m_BiasEnabled = true;
642 
644  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
645  armnn::IConnectableLayer* const depthwiseConvLayer = network->AddDepthwiseConvolution2dLayer(descriptor,
646  layerName.c_str());
647  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
648 
649  inputLayer->GetOutputSlot(0).Connect(depthwiseConvLayer->GetInputSlot(0));
650  depthwiseConvLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
651 
652  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
653  depthwiseConvLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
654 
655  armnn::IConnectableLayer* const weightsLayer = network->AddConstantLayer(weights);
656  weightsLayer->GetOutputSlot(0).Connect(depthwiseConvLayer->GetInputSlot(1u));
657  weightsLayer->GetOutputSlot(0).SetTensorInfo(weights.GetInfo());
658 
659  armnn::IConnectableLayer* const biasLayer = network->AddConstantLayer(biases);
660  biasLayer->GetOutputSlot(0).Connect(depthwiseConvLayer->GetInputSlot(2u));
661  biasLayer->GetOutputSlot(0).SetTensorInfo(biases.GetInfo());
662 
663  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
664  CHECK(deserializedNetwork);
665 
666  const std::vector<armnn::ConstTensor>& constants {weights, biases};
668  layerName, {inputInfo, weightsInfo, biasesInfo}, {outputInfo}, descriptor, constants);
669  deserializedNetwork->ExecuteStrategy(verifier);
670 }
671 
672 TEST_CASE("SerializeDepthwiseConvolution2dWithPerAxisParams")
673 {
674  using namespace armnn;
675 
676  const std::string layerName("depwiseConvolution2dWithPerAxis");
677  const TensorInfo inputInfo ({ 1, 3, 3, 2 }, DataType::QAsymmU8, 0.55f, 128);
678  const TensorInfo outputInfo({ 1, 2, 2, 4 }, DataType::QAsymmU8, 0.75f, 128);
679 
680  const std::vector<float> quantScales{ 0.75f, 0.80f, 0.90f, 0.95f };
681  const unsigned int quantDimension = 0;
682  TensorInfo kernelInfo({ 2, 2, 2, 2 }, DataType::QSymmS8, quantScales, quantDimension, true);
683 
684  const std::vector<float> biasQuantScales{ 0.25f, 0.35f, 0.45f, 0.55f };
685  constexpr unsigned int biasQuantDimension = 0;
686  TensorInfo biasInfo({ 4 }, DataType::Signed32, biasQuantScales, biasQuantDimension, true);
687 
688  std::vector<int8_t> kernelData = GenerateRandomData<int8_t>(kernelInfo.GetNumElements());
689  armnn::ConstTensor weights(kernelInfo, kernelData);
690  std::vector<int32_t> biasData = GenerateRandomData<int32_t>(biasInfo.GetNumElements());
691  armnn::ConstTensor biases(biasInfo, biasData);
692 
694  descriptor.m_StrideX = 1;
695  descriptor.m_StrideY = 1;
696  descriptor.m_PadLeft = 0;
697  descriptor.m_PadRight = 0;
698  descriptor.m_PadTop = 0;
699  descriptor.m_PadBottom = 0;
700  descriptor.m_DilationX = 1;
701  descriptor.m_DilationY = 1;
702  descriptor.m_BiasEnabled = true;
704 
706  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
707  armnn::IConnectableLayer* const depthwiseConvLayer = network->AddDepthwiseConvolution2dLayer(descriptor,
708  layerName.c_str());
709  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
710 
711  inputLayer->GetOutputSlot(0).Connect(depthwiseConvLayer->GetInputSlot(0));
712  depthwiseConvLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
713 
714  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
715  depthwiseConvLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
716 
717  armnn::IConnectableLayer* const weightsLayer = network->AddConstantLayer(weights);
718  weightsLayer->GetOutputSlot(0).Connect(depthwiseConvLayer->GetInputSlot(1u));
719  weightsLayer->GetOutputSlot(0).SetTensorInfo(weights.GetInfo());
720 
721  armnn::IConnectableLayer* const biasLayer = network->AddConstantLayer(biases);
722  biasLayer->GetOutputSlot(0).Connect(depthwiseConvLayer->GetInputSlot(2u));
723  biasLayer->GetOutputSlot(0).SetTensorInfo(biases.GetInfo());
724 
725  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
726  CHECK(deserializedNetwork);
727 
728  const std::vector<armnn::ConstTensor>& constants {weights, biases};
730  layerName, {inputInfo, kernelInfo, biasInfo}, {outputInfo}, descriptor, constants);
731  deserializedNetwork->ExecuteStrategy(verifier);
732 }
733 
734 TEST_CASE("SerializeDepthwiseConvolution2dWeightsAndBiasesAsConstantLayers")
735 {
736  const std::string layerName("depthwiseConvolution2d");
737  const armnn::TensorInfo inputInfo ({ 1, 5, 5, 1 }, armnn::DataType::Float32);
738  const armnn::TensorInfo outputInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32);
739 
740  const armnn::TensorInfo weightsInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32, 0.0f, 0, true);
741  const armnn::TensorInfo biasesInfo ({ 1 }, armnn::DataType::Float32, 0.0f, 0, true);
742 
743  std::vector<float> weightsData = GenerateRandomData<float>(weightsInfo.GetNumElements());
744  armnn::ConstTensor weights(weightsInfo, weightsData);
745 
746  std::vector<float> biasesData = GenerateRandomData<float>(biasesInfo.GetNumElements());
747  armnn::ConstTensor biases(biasesInfo, biasesData);
748 
750  descriptor.m_PadLeft = 1;
751  descriptor.m_PadRight = 1;
752  descriptor.m_PadTop = 1;
753  descriptor.m_PadBottom = 1;
754  descriptor.m_StrideX = 2;
755  descriptor.m_StrideY = 2;
756  descriptor.m_DilationX = 2;
757  descriptor.m_DilationY = 2;
758  descriptor.m_BiasEnabled = true;
760 
762  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
763  armnn::IConnectableLayer* const weightsLayer = network->AddConstantLayer(weights, "Weights");
764  armnn::IConnectableLayer* const biasesLayer = network->AddConstantLayer(biases, "Biases");
765  armnn::IConnectableLayer* const convLayer = network->AddDepthwiseConvolution2dLayer(descriptor,
766  layerName.c_str());
767  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
768 
769  inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
770  weightsLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(1));
771  biasesLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(2));
772  convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
773 
774  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
775  weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsInfo);
776  biasesLayer->GetOutputSlot(0).SetTensorInfo(biasesInfo);
777  convLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
778 
779  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
780  CHECK(deserializedNetwork);
781 
782  const std::vector<armnn::ConstTensor>& constants {weights, biases};
784  layerName, {inputInfo, weightsInfo, biasesInfo}, {outputInfo}, descriptor, constants);
785 
786  deserializedNetwork->ExecuteStrategy(verifier);
787 }
788 
// Round-trip test for a Dequantize layer: a QAsymmU8 (scale 0.5, offset 1)
// input is dequantized to Float32. The network is serialized, deserialized,
// and the layer name, connections and tensor infos are verified.
// NOTE(review): this listing is a doxygen extraction; line 795 was dropped —
// presumably the `armnn::INetworkPtr network = armnn::INetwork::Create();`
// used below. Confirm against the original SerializerTests.cpp.
789 TEST_CASE("SerializeDequantize")
790 {
791  const std::string layerName("dequantize");
792  const armnn::TensorInfo inputInfo({ 1, 5, 2, 3 }, armnn::DataType::QAsymmU8, 0.5f, 1);
793  const armnn::TensorInfo outputInfo({ 1, 5, 2, 3 }, armnn::DataType::Float32);
794 
796  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
797  armnn::IConnectableLayer* const dequantizeLayer = network->AddDequantizeLayer(layerName.c_str());
798  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
799 
800  inputLayer->GetOutputSlot(0).Connect(dequantizeLayer->GetInputSlot(0));
801  dequantizeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
802 
803  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
804  dequantizeLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
805 
806  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
807  CHECK(deserializedNetwork);
808 
809  LayerVerifierBase verifier(layerName, {inputInfo}, {outputInfo});
810  deserializedNetwork->ExecuteStrategy(verifier);
811 }
812 
// Round-trip test for a DetectionPostProcess layer: 2 inputs (box encodings,
// scores), 4 outputs (boxes, classes, scores, num detections), plus a
// constant anchors tensor that must survive serialization.
// NOTE(review): doxygen extraction dropped several hyperlinked lines here —
// the inputInfos/outputInfos element lists (818-819, 823-826), the
// DetectionPostProcessDescriptor declaration (829), the INetwork creation
// (853) and the verifier declaration (875). Confirm against the original.
813 TEST_CASE("SerializeDeserializeDetectionPostProcess")
814 {
815  const std::string layerName("detectionPostProcess");
816 
817  const std::vector<armnn::TensorInfo> inputInfos({
820  });
821 
822  const std::vector<armnn::TensorInfo> outputInfos({
827  });
828 
830  descriptor.m_UseRegularNms = true;
831  descriptor.m_MaxDetections = 3;
832  descriptor.m_MaxClassesPerDetection = 1;
833  descriptor.m_DetectionsPerClass =1;
834  descriptor.m_NmsScoreThreshold = 0.0;
835  descriptor.m_NmsIouThreshold = 0.5;
836  descriptor.m_NumClasses = 2;
837  descriptor.m_ScaleY = 10.0;
838  descriptor.m_ScaleX = 10.0;
839  descriptor.m_ScaleH = 5.0;
840  descriptor.m_ScaleW = 5.0;
841 
842  const armnn::TensorInfo anchorsInfo({ 6, 4 }, armnn::DataType::Float32, 0.0f, 0, true);
843  const std::vector<float> anchorsData({
844  0.5f, 0.5f, 1.0f, 1.0f,
845  0.5f, 0.5f, 1.0f, 1.0f,
846  0.5f, 0.5f, 1.0f, 1.0f,
847  0.5f, 10.5f, 1.0f, 1.0f,
848  0.5f, 10.5f, 1.0f, 1.0f,
849  0.5f, 100.5f, 1.0f, 1.0f
850  });
851  armnn::ConstTensor anchors(anchorsInfo, anchorsData);
852 
854  armnn::IConnectableLayer* const detectionLayer =
855  network->AddDetectionPostProcessLayer(descriptor, anchors, layerName.c_str());
856 
857  // Two graph inputs feed the detection layer's two input slots.
858  for (unsigned int i = 0; i < 2; i++)
859  {
860  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(static_cast<int>(i));
861  inputLayer->GetOutputSlot(0).Connect(detectionLayer->GetInputSlot(i));
862  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfos[i]);
863  }
864 
865  // Each of the four detection outputs gets its own graph output layer.
866  for (unsigned int i = 0; i < 4; i++)
867  {
868  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(static_cast<int>(i));
869  detectionLayer->GetOutputSlot(i).Connect(outputLayer->GetInputSlot(0));
870  detectionLayer->GetOutputSlot(i).SetTensorInfo(outputInfos[i]);
871  }
872 
873  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
874  CHECK(deserializedNetwork);
875 
876  // The anchors constant is checked against the deserialized layer too.
877  const std::vector<armnn::ConstTensor>& constants {anchors};
878  layerName, inputInfos, outputInfos, descriptor, constants);
879  deserializedNetwork->ExecuteStrategy(verifier);
880 }
879 
// Round-trip test for a binary Division layer: two Float32 inputs, one
// output, all with identical tensor info. Verifies name/connections survive.
// NOTE(review): doxygen extraction dropped line 885 — presumably the
// `armnn::INetworkPtr network = armnn::INetwork::Create();` used below.
880 TEST_CASE("SerializeDivision")
881 {
882  const std::string layerName("division");
883  const armnn::TensorInfo info({ 1, 5, 2, 3 }, armnn::DataType::Float32);
884 
886  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
887  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
888  armnn::IConnectableLayer* const divisionLayer = network->AddDivisionLayer(layerName.c_str());
889  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
890 
891  inputLayer0->GetOutputSlot(0).Connect(divisionLayer->GetInputSlot(0));
892  inputLayer1->GetOutputSlot(0).Connect(divisionLayer->GetInputSlot(1));
893  divisionLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
894 
895  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
896  inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
897  divisionLayer->GetOutputSlot(0).SetTensorInfo(info);
898 
899  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
900  CHECK(deserializedNetwork);
901 
902  LayerVerifierBase verifier(layerName, {info, info}, {info});
903  deserializedNetwork->ExecuteStrategy(verifier);
904 }
905 
// Round-trip test for a Comparison(Equal) layer: two Float32 inputs produce
// a Boolean output of the same shape.
// NOTE(review): doxygen extraction dropped lines 913 and 916 — presumably the
// INetwork creation and the ComparisonDescriptor `equalDescriptor`
// declaration used below. Confirm against the original.
906 TEST_CASE("SerializeDeserializeComparisonEqual")
907 {
908  const std::string layerName("EqualLayer");
909  const armnn::TensorInfo inputTensorInfo1 = armnn::TensorInfo({2, 1, 2, 4}, armnn::DataType::Float32);
910  const armnn::TensorInfo inputTensorInfo2 = armnn::TensorInfo({2, 1, 2, 4}, armnn::DataType::Float32);
911  const armnn::TensorInfo outputTensorInfo = armnn::TensorInfo({2, 1, 2, 4}, armnn::DataType::Boolean);
912 
914  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(0);
915  armnn::IConnectableLayer* const inputLayer2 = network->AddInputLayer(1);
917  armnn::IConnectableLayer* const equalLayer = network->AddComparisonLayer(equalDescriptor, layerName.c_str());
918  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
919 
920  inputLayer1->GetOutputSlot(0).Connect(equalLayer->GetInputSlot(0));
921  inputLayer1->GetOutputSlot(0).SetTensorInfo(inputTensorInfo1);
922  inputLayer2->GetOutputSlot(0).Connect(equalLayer->GetInputSlot(1));
923  inputLayer2->GetOutputSlot(0).SetTensorInfo(inputTensorInfo2);
924  equalLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
925  equalLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
926 
927  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
928  CHECK(deserializedNetwork);
929 
930  LayerVerifierBase verifier(layerName, {inputTensorInfo1, inputTensorInfo2}, {outputTensorInfo});
931  deserializedNetwork->ExecuteStrategy(verifier);
932 }
933 
// Helper shared by the SerializeElementwiseUnary test case: builds a
// single-layer network for the given unary operation (layer name is the
// operation's string form), round-trips it through the serializer and
// verifies it with a descriptor-aware verifier.
// NOTE(review): doxygen extraction dropped lines 940-941 (the
// inputInfo/outputInfo declarations), 945 (INetwork creation) and 961 (the
// verifier type line). Confirm against the original.
934 void SerializeElementwiseUnaryTest(armnn::UnaryOperation unaryOperation)
935 {
936  auto layerName = GetUnaryOperationAsCString(unaryOperation);
937 
938  const armnn::TensorShape shape{2, 1, 2, 2};
939 
942 
943  armnn::ElementwiseUnaryDescriptor descriptor(unaryOperation);
944 
946  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
947  armnn::IConnectableLayer* const elementwiseUnaryLayer =
948  network->AddElementwiseUnaryLayer(descriptor, layerName);
949  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
950 
951  inputLayer->GetOutputSlot(0).Connect(elementwiseUnaryLayer->GetInputSlot(0));
952  elementwiseUnaryLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
953 
954  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
955  elementwiseUnaryLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
956 
957  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
958 
959  CHECK(deserializedNetwork);
960 
962  verifier(layerName, { inputInfo }, { outputInfo }, descriptor);
963 
964  deserializedNetwork->ExecuteStrategy(verifier);
965 }
966 
967 TEST_CASE("SerializeElementwiseUnary")
968 {
969  using op = armnn::UnaryOperation;
970  std::initializer_list<op> allUnaryOperations = {op::Abs, op::Exp, op::Sqrt, op::Rsqrt, op::Neg,
971  op::LogicalNot, op::Log, op::Sin};
972 
973  for (auto unaryOperation : allUnaryOperations)
974  {
975  SerializeElementwiseUnaryTest(unaryOperation);
976  }
977 }
978 
// Round-trip test for a Fill layer: a Signed32 shape tensor in, a Float32
// tensor filled with 1.0f out (FillDescriptor value = 1.0f).
// NOTE(review): doxygen extraction dropped line 987 — presumably the
// `armnn::INetworkPtr network = armnn::INetwork::Create();` used below.
979 TEST_CASE("SerializeFill")
980 {
981  const std::string layerName("fill");
982  const armnn::TensorInfo inputInfo({4}, armnn::DataType::Signed32);
983  const armnn::TensorInfo outputInfo({1, 3, 3, 1}, armnn::DataType::Float32);
984 
985  armnn::FillDescriptor descriptor(1.0f);
986 
988  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
989  armnn::IConnectableLayer* const fillLayer = network->AddFillLayer(descriptor, layerName.c_str());
990  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
991 
992  inputLayer->GetOutputSlot(0).Connect(fillLayer->GetInputSlot(0));
993  fillLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
994 
995  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
996  fillLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
997 
998  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
999  CHECK(deserializedNetwork);
1000 
1001  LayerVerifierBaseWithDescriptor<armnn::FillDescriptor> verifier(layerName, {inputInfo}, {outputInfo}, descriptor);
1002 
1003  deserializedNetwork->ExecuteStrategy(verifier);
1004 }
1005 
// Round-trip test for a Floor layer (identical input/output tensor info).
// NOTE(review): doxygen extraction dropped lines 1009 and 1011 — presumably
// the TensorInfo `info` declaration and the INetwork creation used below.
1006 TEST_CASE("SerializeFloor")
1007 {
1008  const std::string layerName("floor");
1010 
1012  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1013  armnn::IConnectableLayer* const floorLayer = network->AddFloorLayer(layerName.c_str());
1014  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1015 
1016  inputLayer->GetOutputSlot(0).Connect(floorLayer->GetInputSlot(0));
1017  floorLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1018 
1019  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
1020  floorLayer->GetOutputSlot(0).SetTensorInfo(info);
1021 
1022  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1023  CHECK(deserializedNetwork);
1024 
1025  LayerVerifierBase verifier(layerName, {info}, {info});
1026  deserializedNetwork->ExecuteStrategy(verifier);
1027 }
1028 
1030 class FullyConnectedLayerVerifier : public LayerVerifierBaseWithDescriptor<FullyConnectedDescriptor>
1031 {
1032 public:
1033  FullyConnectedLayerVerifier(const std::string& layerName,
1034  const std::vector<armnn::TensorInfo>& inputInfos,
1035  const std::vector<armnn::TensorInfo>& outputInfos,
1036  const FullyConnectedDescriptor& descriptor)
1037  : LayerVerifierBaseWithDescriptor<FullyConnectedDescriptor>(layerName, inputInfos, outputInfos, descriptor) {}
1038 
1039  void ExecuteStrategy(const armnn::IConnectableLayer* layer,
1040  const armnn::BaseDescriptor& descriptor,
1041  const std::vector<armnn::ConstTensor>& constants,
1042  const char* name,
1043  const armnn::LayerBindingId id = 0) override
1044  {
1045  armnn::IgnoreUnused(constants, id);
1046  switch (layer->GetType())
1047  {
1048  case armnn::LayerType::Input: break;
1049  case armnn::LayerType::Output: break;
1050  case armnn::LayerType::Constant: break;
1051  default:
1052  {
1053  VerifyNameAndConnections(layer, name);
1054  const FullyConnectedDescriptor& layerDescriptor =
1055  static_cast<const FullyConnectedDescriptor&>(descriptor);
1056  CHECK(layerDescriptor.m_ConstantWeights == m_Descriptor.m_ConstantWeights);
1057  CHECK(layerDescriptor.m_BiasEnabled == m_Descriptor.m_BiasEnabled);
1058  CHECK(layerDescriptor.m_TransposeWeightMatrix == m_Descriptor.m_TransposeWeightMatrix);
1059  }
1060  }
1061  }
1062 };
1063 
// Round-trip test for a FullyConnected layer whose weights and biases are
// supplied through input layers (slots 1 and 2 of the FC layer).
// NOTE(review): doxygen extraction dropped lines 1077 and 1082 — presumably
// the FullyConnectedDescriptor declaration and the INetwork creation used
// below. Also note: the `weights`/`biases` ConstTensors built here are not
// added to the network — presumably kept only for tensor-info setup; verify
// against the original source.
1064 TEST_CASE("SerializeFullyConnected")
1065 {
1066  const std::string layerName("fullyConnected");
1067  const armnn::TensorInfo inputInfo ({ 2, 5, 1, 1 }, armnn::DataType::Float32);
1068  const armnn::TensorInfo outputInfo({ 2, 3 }, armnn::DataType::Float32);
1069 
1070  const armnn::TensorInfo weightsInfo({ 5, 3 }, armnn::DataType::Float32, 0.0f, 0, true);
1071  const armnn::TensorInfo biasesInfo ({ 3 }, armnn::DataType::Float32, 0.0f, 0, true);
1072  std::vector<float> weightsData = GenerateRandomData<float>(weightsInfo.GetNumElements());
1073  std::vector<float> biasesData = GenerateRandomData<float>(biasesInfo.GetNumElements());
1074  armnn::ConstTensor weights(weightsInfo, weightsData);
1075  armnn::ConstTensor biases(biasesInfo, biasesData);
1076 
1078  descriptor.m_BiasEnabled = true;
1079  descriptor.m_TransposeWeightMatrix = false;
1080  descriptor.m_ConstantWeights = true;
1081 
1083  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1084  armnn::IConnectableLayer* const weightsInputLayer = network->AddInputLayer(1);
1085  armnn::IConnectableLayer* const biasInputLayer = network->AddInputLayer(2);
1086  armnn::IConnectableLayer* const fullyConnectedLayer =
1087  network->AddFullyConnectedLayer(descriptor,
1088  layerName.c_str());
1089  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1090 
1091  inputLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(0));
1092  weightsInputLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(1));
1093  biasInputLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(2));
1094  fullyConnectedLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1095 
1096  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
1097  weightsInputLayer->GetOutputSlot(0).SetTensorInfo(weightsInfo);
1098  biasInputLayer->GetOutputSlot(0).SetTensorInfo(biasesInfo);
1099  fullyConnectedLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1100 
1101  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1102  CHECK(deserializedNetwork);
1103 
1104  FullyConnectedLayerVerifier verifier(layerName, {inputInfo, weightsInfo, biasesInfo}, {outputInfo}, descriptor);
1105  deserializedNetwork->ExecuteStrategy(verifier);
1106 }
1107 
// Round-trip test for a FullyConnected layer with non-constant weights
// (m_ConstantWeights = false): weights and biases arrive as network inputs,
// so no ConstTensors are involved and `constants` is empty.
// NOTE(review): doxygen extraction dropped lines 1117-1118, 1120, 1125 and
// 1148 — presumably the FullyConnectedDescriptor declaration, the INetwork
// creation and the verifier declaration used below. Confirm against the
// original.
1108 TEST_CASE("SerializeFullyConnectedWeightsAndBiasesAsInputs")
1109 {
1110  const std::string layerName("fullyConnected_weights_as_inputs");
1111  const armnn::TensorInfo inputInfo ({ 2, 5, 1, 1 }, armnn::DataType::Float32);
1112  const armnn::TensorInfo outputInfo({ 2, 3 }, armnn::DataType::Float32);
1113 
1114  const armnn::TensorInfo weightsInfo({ 5, 3 }, armnn::DataType::Float32);
1115  const armnn::TensorInfo biasesInfo ({ 3 }, armnn::DataType::Float32);
1116 
1119 
1121  descriptor.m_BiasEnabled = true;
1122  descriptor.m_TransposeWeightMatrix = false;
1123  descriptor.m_ConstantWeights = false;
1124 
1126  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1127  armnn::IConnectableLayer* const weightsInputLayer = network->AddInputLayer(1);
1128  armnn::IConnectableLayer* const biasInputLayer = network->AddInputLayer(2);
1129  armnn::IConnectableLayer* const fullyConnectedLayer =
1130  network->AddFullyConnectedLayer(descriptor,
1131  layerName.c_str());
1132  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1133 
1134  inputLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(0));
1135  weightsInputLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(1));
1136  biasInputLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(2));
1137  fullyConnectedLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1138 
1139  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
1140  weightsInputLayer->GetOutputSlot(0).SetTensorInfo(weightsInfo);
1141  biasInputLayer->GetOutputSlot(0).SetTensorInfo(biasesInfo);
1142  fullyConnectedLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1143 
1144  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1145  CHECK(deserializedNetwork);
1146 
1147  const std::vector<armnn::ConstTensor> constants {};
1149  layerName, {inputInfo, weightsInfo, biasesInfo}, {outputInfo}, descriptor, constants);
1150  deserializedNetwork->ExecuteStrategy(verifier);
1151 }
1152 
// Round-trip test for a FullyConnected layer fed by Constant layers for its
// weights and biases (m_ConstantWeights = true).
// NOTE(review): the layer name string "fullyConnected_weights_as_inputs" is
// identical to the previous test's — it looks copy-pasted and misnamed for
// this constant-layers variant, though it is used consistently so the test
// still passes. Also, doxygen extraction dropped lines 1167 and 1172 —
// presumably the descriptor declaration and the INetwork creation.
1153 TEST_CASE("SerializeFullyConnectedWeightsAndBiasesAsConstantLayers")
1154 {
1155  const std::string layerName("fullyConnected_weights_as_inputs");
1156  const armnn::TensorInfo inputInfo ({ 2, 5, 1, 1 }, armnn::DataType::Float32);
1157  const armnn::TensorInfo outputInfo({ 2, 3 }, armnn::DataType::Float32);
1158 
1159  const armnn::TensorInfo weightsInfo({ 5, 3 }, armnn::DataType::Float32, 0.0f, 0, true);
1160  const armnn::TensorInfo biasesInfo ({ 3 }, armnn::DataType::Float32, 0.0f, 0, true);
1161 
1162  std::vector<float> weightsData = GenerateRandomData<float>(weightsInfo.GetNumElements());
1163  std::vector<float> biasesData = GenerateRandomData<float>(biasesInfo.GetNumElements());
1164  armnn::ConstTensor weights(weightsInfo, weightsData);
1165  armnn::ConstTensor biases(biasesInfo, biasesData);
1166 
1168  descriptor.m_BiasEnabled = true;
1169  descriptor.m_TransposeWeightMatrix = false;
1170  descriptor.m_ConstantWeights = true;
1171 
1173  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1174  armnn::IConnectableLayer* const weightsLayer = network->AddConstantLayer(weights, "Weights");
1175  armnn::IConnectableLayer* const biasesLayer = network->AddConstantLayer(biases, "Biases");
1176  armnn::IConnectableLayer* const fullyConnectedLayer = network->AddFullyConnectedLayer(descriptor,layerName.c_str());
1177  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1178 
1179  inputLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(0));
1180  weightsLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(1));
1181  biasesLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(2));
1182  fullyConnectedLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1183 
1184  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
1185  weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsInfo);
1186  biasesLayer->GetOutputSlot(0).SetTensorInfo(biasesInfo);
1187  fullyConnectedLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1188 
1189  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1190  CHECK(deserializedNetwork);
1191 
1192  FullyConnectedLayerVerifier verifier(layerName, {inputInfo, weightsInfo, biasesInfo}, {outputInfo}, descriptor);
1193  deserializedNetwork->ExecuteStrategy(verifier);
1194 }
1195 
// Round-trip test for a Gather layer (axis = 1): QAsymmU8 params gathered by
// a Signed32 constant indices tensor. A local verifier additionally checks
// the descriptor's m_Axis on the deserialized layer.
// NOTE(review): doxygen extraction dropped lines 1198 and 1244 — presumably
// a using-declaration for GatherDescriptor and the INetwork creation used
// below. Confirm against the original.
1196 TEST_CASE("SerializeGather")
1197 {
1199  class GatherLayerVerifier : public LayerVerifierBaseWithDescriptor<GatherDescriptor>
1200  {
1201  public:
1202  GatherLayerVerifier(const std::string& layerName,
1203  const std::vector<armnn::TensorInfo>& inputInfos,
1204  const std::vector<armnn::TensorInfo>& outputInfos,
1205  const GatherDescriptor& descriptor)
1206  : LayerVerifierBaseWithDescriptor<GatherDescriptor>(layerName, inputInfos, outputInfos, descriptor) {}
1207 
1208  void ExecuteStrategy(const armnn::IConnectableLayer* layer,
1209  const armnn::BaseDescriptor& descriptor,
1210  const std::vector<armnn::ConstTensor>& constants,
1211  const char* name,
1212  const armnn::LayerBindingId id = 0) override
1213  {
1214  armnn::IgnoreUnused(constants, id);
1215  switch (layer->GetType())
1216  {
1217  case armnn::LayerType::Input: break;
1218  case armnn::LayerType::Output: break;
1219  case armnn::LayerType::Constant: break;
1220  default:
1221  {
1222  // Only the Gather layer itself reaches here.
1223  VerifyNameAndConnections(layer, name);
1224  const GatherDescriptor& layerDescriptor = static_cast<const GatherDescriptor&>(descriptor);
1225  CHECK(layerDescriptor.m_Axis == m_Descriptor.m_Axis);
1226  }
1227  }
1228  }
1229  };
1230 
1231  const std::string layerName("gather");
1232  armnn::TensorInfo paramsInfo({ 8 }, armnn::DataType::QAsymmU8);
1233  armnn::TensorInfo outputInfo({ 3 }, armnn::DataType::QAsymmU8);
1234  const armnn::TensorInfo indicesInfo({ 3 }, armnn::DataType::Signed32, 0.0f, 0, true);
1235  GatherDescriptor descriptor;
1236  descriptor.m_Axis = 1;
1237 
1238  paramsInfo.SetQuantizationScale(1.0f);
1239  paramsInfo.SetQuantizationOffset(0);
1240  outputInfo.SetQuantizationScale(1.0f);
1241  outputInfo.SetQuantizationOffset(0);
1242 
1243  const std::vector<int32_t>& indicesData = {7, 6, 5};
1244 
1245  armnn::IConnectableLayer *const inputLayer = network->AddInputLayer(0);
1246  armnn::IConnectableLayer *const constantLayer =
1247  network->AddConstantLayer(armnn::ConstTensor(indicesInfo, indicesData));
1248  armnn::IConnectableLayer *const gatherLayer = network->AddGatherLayer(descriptor, layerName.c_str());
1249  armnn::IConnectableLayer *const outputLayer = network->AddOutputLayer(0);
1250 
1251  inputLayer->GetOutputSlot(0).Connect(gatherLayer->GetInputSlot(0));
1252  constantLayer->GetOutputSlot(0).Connect(gatherLayer->GetInputSlot(1));
1253  gatherLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1254 
1255  inputLayer->GetOutputSlot(0).SetTensorInfo(paramsInfo);
1256  constantLayer->GetOutputSlot(0).SetTensorInfo(indicesInfo);
1257  gatherLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1258 
1259  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1260  CHECK(deserializedNetwork);
1261 
1262  GatherLayerVerifier verifier(layerName, {paramsInfo, indicesInfo}, {outputInfo}, descriptor);
1263  deserializedNetwork->ExecuteStrategy(verifier);
1264 }
1265 
// Round-trip test for a GatherNd layer: QAsymmU8 params gathered by a
// Signed32 constant indices tensor; a local verifier checks names and
// connections (GatherNd has no descriptor).
// NOTE(review): doxygen extraction dropped lines 1285-1287 (presumably the
// `case Input/Output/Constant:` labels preceding the `break;`) and 1309
// (presumably the INetwork creation). Confirm against the original.
1266 TEST_CASE("SerializeGatherNd")
1267 {
1268  class GatherNdLayerVerifier : public LayerVerifierBase
1269  {
1270  public:
1271  GatherNdLayerVerifier(const std::string& layerName,
1272  const std::vector<armnn::TensorInfo>& inputInfos,
1273  const std::vector<armnn::TensorInfo>& outputInfos)
1274  : LayerVerifierBase(layerName, inputInfos, outputInfos) {}
1275 
1276  void ExecuteStrategy(const armnn::IConnectableLayer* layer,
1277  const armnn::BaseDescriptor&,
1278  const std::vector<armnn::ConstTensor>& constants,
1279  const char* name,
1280  const armnn::LayerBindingId id = 0) override
1281  {
1282  armnn::IgnoreUnused(constants, id);
1283  switch (layer->GetType())
1284  {
1288  break;
1289  default:
1290  {
1291  VerifyNameAndConnections(layer, name);
1292  }
1293  }
1294  }
1295  };
1296 
1297  const std::string layerName("gatherNd");
1298  armnn::TensorInfo paramsInfo({ 6, 3 }, armnn::DataType::QAsymmU8);
1299  armnn::TensorInfo outputInfo({ 3, 3 }, armnn::DataType::QAsymmU8);
1300  const armnn::TensorInfo indicesInfo({ 3, 1 }, armnn::DataType::Signed32, 0.0f, 0, true);
1301 
1302  paramsInfo.SetQuantizationScale(1.0f);
1303  paramsInfo.SetQuantizationOffset(0);
1304  outputInfo.SetQuantizationScale(1.0f);
1305  outputInfo.SetQuantizationOffset(0);
1306 
1307  const std::vector<int32_t>& indicesData = {5, 1, 0};
1308 
1310  armnn::IConnectableLayer *const inputLayer = network->AddInputLayer(0);
1311  armnn::IConnectableLayer *const constantLayer =
1312  network->AddConstantLayer(armnn::ConstTensor(indicesInfo, indicesData));
1313  armnn::IConnectableLayer *const gatherNdLayer = network->AddGatherNdLayer(layerName.c_str());
1314  armnn::IConnectableLayer *const outputLayer = network->AddOutputLayer(0);
1315 
1316  inputLayer->GetOutputSlot(0).Connect(gatherNdLayer->GetInputSlot(0));
1317  constantLayer->GetOutputSlot(0).Connect(gatherNdLayer->GetInputSlot(1));
1318  gatherNdLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1319 
1320  inputLayer->GetOutputSlot(0).SetTensorInfo(paramsInfo);
1321  constantLayer->GetOutputSlot(0).SetTensorInfo(indicesInfo);
1322  gatherNdLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1323 
1324  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1325  CHECK(deserializedNetwork);
1326 
1327  GatherNdLayerVerifier verifier(layerName, {paramsInfo, indicesInfo}, {outputInfo});
1328  deserializedNetwork->ExecuteStrategy(verifier);
1329 }
1330 
// Round-trip test for a Comparison(Greater) layer on two {2,1,2,4} tensors.
// NOTE(review): doxygen extraction dropped lines 1337-1338, 1340 and 1343 —
// presumably the inputInfo/outputInfo declarations, the INetwork creation
// and the ComparisonDescriptor `greaterDescriptor` declaration used below.
// The layer variable is named `equalLayer`, apparently copy-pasted from the
// Equal test — harmless but misleading.
1331 TEST_CASE("SerializeComparisonGreater")
1332 {
1333  const std::string layerName("greater");
1334 
1335  const armnn::TensorShape shape{2, 1, 2, 4};
1336 
1339 
1341  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
1342  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
1344  armnn::IConnectableLayer* const equalLayer = network->AddComparisonLayer(greaterDescriptor, layerName.c_str());
1345  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1346 
1347  inputLayer0->GetOutputSlot(0).Connect(equalLayer->GetInputSlot(0));
1348  inputLayer1->GetOutputSlot(0).Connect(equalLayer->GetInputSlot(1));
1349  equalLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1350 
1351  inputLayer0->GetOutputSlot(0).SetTensorInfo(inputInfo);
1352  inputLayer1->GetOutputSlot(0).SetTensorInfo(inputInfo);
1353  equalLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1354 
1355  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1356  CHECK(deserializedNetwork);
1357 
1358  LayerVerifierBase verifier(layerName, { inputInfo, inputInfo }, { outputInfo });
1359  deserializedNetwork->ExecuteStrategy(verifier);
1360 }
1361 
1362 
// Round-trip test for an InstanceNormalization layer with non-default
// gamma/beta/eps and NHWC data layout; a descriptor-aware verifier checks
// the values survive serialization.
// NOTE(review): doxygen extraction dropped lines 1368, 1374 and 1389 —
// presumably the InstanceNormalizationDescriptor declaration, the INetwork
// creation, and the verifier type line. Confirm against the original.
1363 TEST_CASE("SerializeInstanceNormalization")
1364 {
1365  const std::string layerName("instanceNormalization");
1366  const armnn::TensorInfo info({ 1, 2, 1, 5 }, armnn::DataType::Float32);
1367 
1369  descriptor.m_Gamma = 1.1f;
1370  descriptor.m_Beta = 0.1f;
1371  descriptor.m_Eps = 0.0001f;
1372  descriptor.m_DataLayout = armnn::DataLayout::NHWC;
1373 
1375  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1376  armnn::IConnectableLayer* const instanceNormLayer =
1377  network->AddInstanceNormalizationLayer(descriptor, layerName.c_str());
1378  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1379 
1380  inputLayer->GetOutputSlot(0).Connect(instanceNormLayer->GetInputSlot(0));
1381  instanceNormLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1382 
1383  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
1384  instanceNormLayer->GetOutputSlot(0).SetTensorInfo(info);
1385 
1386  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1387  CHECK(deserializedNetwork);
1388 
1390  layerName, {info}, {info}, descriptor);
1391  deserializedNetwork->ExecuteStrategy(verifier);
1392 }
1393 
// Round-trip test for an L2Normalization layer with an explicit epsilon.
// NOTE(review): doxygen extraction dropped lines 1399-1400, 1403 and 1417 —
// presumably the L2NormalizationDescriptor `desc` declaration, the INetwork
// creation, and the verifier type line. Confirm against the original.
1394 TEST_CASE("SerializeL2Normalization")
1395 {
1396  const std::string l2NormLayerName("l2Normalization");
1397  const armnn::TensorInfo info({1, 2, 1, 5}, armnn::DataType::Float32);
1398 
1401  desc.m_Eps = 0.0001f;
1402 
1404  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
1405  armnn::IConnectableLayer* const l2NormLayer = network->AddL2NormalizationLayer(desc, l2NormLayerName.c_str());
1406  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1407 
1408  inputLayer0->GetOutputSlot(0).Connect(l2NormLayer->GetInputSlot(0));
1409  l2NormLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1410 
1411  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
1412  l2NormLayer->GetOutputSlot(0).SetTensorInfo(info);
1413 
1414  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1415  CHECK(deserializedNetwork);
1416 
1418  l2NormLayerName, {info}, {info}, desc);
1419  deserializedNetwork->ExecuteStrategy(verifier);
1420 }
1421 
// Backward-compatibility test: deserializes a pre-recorded flatbuffer model
// (captured before the epsilon field existed on L2Normalization) and checks
// the deserializer fills in the documented default epsilon of 1e-12f.
// NOTE(review): doxygen extraction dropped lines 1470 and 1474 — presumably
// the L2NormalizationDescriptor `desc` declaration and the verifier type
// line. Do not modify the byte array below; it is a captured binary model.
1422 TEST_CASE("EnsureL2NormalizationBackwardCompatibility")
1423 {
1424  // The hex data below is a flat buffer containing a simple network with one input
1425  // a L2Normalization layer and an output layer with dimensions as per the tensor infos below.
1426  //
1427  // This test verifies that we can still read back these old style
1428  // models without the normalization epsilon value.
1429  const std::vector<uint8_t> l2NormalizationModel =
1430  {
1431  0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00,
1432  0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
1433  0x3C, 0x01, 0x00, 0x00, 0x74, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
1434  0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0xE8, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B,
1435  0x04, 0x00, 0x00, 0x00, 0xD6, 0xFE, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x08, 0x00,
1436  0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x9E, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00,
1437  0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00,
1438  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1439  0x4C, 0xFF, 0xFF, 0xFF, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x44, 0xFF, 0xFF, 0xFF, 0x00, 0x00,
1440  0x00, 0x20, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00,
1441  0x20, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x06, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00,
1442  0x0E, 0x00, 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x14, 0x00, 0x0E, 0x00, 0x00, 0x00,
1443  0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x1F, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x20, 0x00,
1444  0x00, 0x00, 0x0F, 0x00, 0x00, 0x00, 0x6C, 0x32, 0x4E, 0x6F, 0x72, 0x6D, 0x61, 0x6C, 0x69, 0x7A, 0x61, 0x74,
1445  0x69, 0x6F, 0x6E, 0x00, 0x01, 0x00, 0x00, 0x00, 0x48, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00,
1446  0x00, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
1447  0x52, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00,
1448  0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00,
1449  0x08, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1450  0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09,
1451  0x04, 0x00, 0x00, 0x00, 0xF6, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x0A, 0x00,
1452  0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x14, 0x00, 0x00, 0x00,
1453  0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00,
1454  0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1455  0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0A, 0x00, 0x00, 0x00,
1456  0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x08, 0x00,
1457  0x07, 0x00, 0x0C, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
1458  0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
1459  0x05, 0x00, 0x00, 0x00, 0x00
1460  };
1461 
1462  armnn::INetworkPtr deserializedNetwork =
1463  DeserializeNetwork(std::string(l2NormalizationModel.begin(), l2NormalizationModel.end()));
1464  CHECK(deserializedNetwork);
1465 
1466  const std::string layerName("l2Normalization");
1467  const armnn::TensorInfo inputInfo = armnn::TensorInfo({1, 2, 1, 5}, armnn::DataType::Float32);
1468 
1471  // Since this variable does not exist in the l2NormalizationModel dump, the default value will be loaded
1472  desc.m_Eps = 1e-12f;
1473 
1475  layerName, {inputInfo}, {inputInfo}, desc);
1476  deserializedNetwork->ExecuteStrategy(verifier);
1477 }
1478 
// Round-trip test for a LogicalBinary (logical AND, per the layer name)
// layer on two Boolean tensors of shape {2,1,2,2}.
// NOTE(review): doxygen extraction dropped lines 1485-1486, 1488, 1490 and
// 1507 — presumably the inputInfo/outputInfo declarations, the
// LogicalBinaryDescriptor declaration, the INetwork creation and the
// verifier type line. Confirm against the original.
1479 TEST_CASE("SerializeLogicalBinary")
1480 {
1481  const std::string layerName("logicalBinaryAnd");
1482 
1483  const armnn::TensorShape shape{2, 1, 2, 2};
1484 
1487 
1489 
1491  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
1492  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
1493  armnn::IConnectableLayer* const logicalBinaryLayer = network->AddLogicalBinaryLayer(descriptor, layerName.c_str());
1494  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1495 
1496  inputLayer0->GetOutputSlot(0).Connect(logicalBinaryLayer->GetInputSlot(0));
1497  inputLayer1->GetOutputSlot(0).Connect(logicalBinaryLayer->GetInputSlot(1));
1498  logicalBinaryLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1499 
1500  inputLayer0->GetOutputSlot(0).SetTensorInfo(inputInfo);
1501  inputLayer1->GetOutputSlot(0).SetTensorInfo(inputInfo);
1502  logicalBinaryLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1503 
1504  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1505  CHECK(deserializedNetwork);
1506 
1508  layerName, { inputInfo, inputInfo }, { outputInfo }, descriptor);
1509  deserializedNetwork->ExecuteStrategy(verifier);
1510 }
1511 
// Round-trip test for a LogSoftmax layer (beta = 1, axis = -1, i.e. the
// innermost dimension).
// NOTE(review): doxygen extraction dropped lines 1515 and 1521 — presumably
// the TensorInfo `info` declaration and the INetwork creation used below.
1512 TEST_CASE("SerializeLogSoftmax")
1513 {
1514  const std::string layerName("log_softmax");
1516 
1517  armnn::LogSoftmaxDescriptor descriptor;
1518  descriptor.m_Beta = 1.0f;
1519  descriptor.m_Axis = -1;
1520 
1522  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1523  armnn::IConnectableLayer* const logSoftmaxLayer = network->AddLogSoftmaxLayer(descriptor, layerName.c_str());
1524  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1525 
1526  inputLayer->GetOutputSlot(0).Connect(logSoftmaxLayer->GetInputSlot(0));
1527  logSoftmaxLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1528 
1529  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
1530  logSoftmaxLayer->GetOutputSlot(0).SetTensorInfo(info);
1531 
1532  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1533  CHECK(deserializedNetwork);
1534 
1535  LayerVerifierBaseWithDescriptor<armnn::LogSoftmaxDescriptor> verifier(layerName, {info}, {info}, descriptor);
1536  deserializedNetwork->ExecuteStrategy(verifier);
1537 }
1538 
1539 TEST_CASE("SerializeMaximum")
1540 {
1541  const std::string layerName("maximum");
1542  const armnn::TensorInfo info({ 1, 2, 2, 3 }, armnn::DataType::Float32);
1543 
1545  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
1546  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
1547  armnn::IConnectableLayer* const maximumLayer = network->AddMaximumLayer(layerName.c_str());
1548  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1549 
1550  inputLayer0->GetOutputSlot(0).Connect(maximumLayer->GetInputSlot(0));
1551  inputLayer1->GetOutputSlot(0).Connect(maximumLayer->GetInputSlot(1));
1552  maximumLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1553 
1554  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
1555  inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
1556  maximumLayer->GetOutputSlot(0).SetTensorInfo(info);
1557 
1558  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1559  CHECK(deserializedNetwork);
1560 
1561  LayerVerifierBase verifier(layerName, {info, info}, {info});
1562  deserializedNetwork->ExecuteStrategy(verifier);
1563 }
1564 
1565 TEST_CASE("SerializeMean")
1566 {
1567  const std::string layerName("mean");
1568  const armnn::TensorInfo inputInfo({1, 1, 3, 2}, armnn::DataType::Float32);
1569  const armnn::TensorInfo outputInfo({1, 1, 1, 2}, armnn::DataType::Float32);
1570 
1571  armnn::MeanDescriptor descriptor;
1572  descriptor.m_Axis = { 2 };
1573  descriptor.m_KeepDims = true;
1574 
1576  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1577  armnn::IConnectableLayer* const meanLayer = network->AddMeanLayer(descriptor, layerName.c_str());
1578  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1579 
1580  inputLayer->GetOutputSlot(0).Connect(meanLayer->GetInputSlot(0));
1581  meanLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1582 
1583  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
1584  meanLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1585 
1586  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1587  CHECK(deserializedNetwork);
1588 
1589  LayerVerifierBaseWithDescriptor<armnn::MeanDescriptor> verifier(layerName, {inputInfo}, {outputInfo}, descriptor);
1590  deserializedNetwork->ExecuteStrategy(verifier);
1591 }
1592 
1593 TEST_CASE("SerializeMerge")
1594 {
1595  const std::string layerName("merge");
1596  const armnn::TensorInfo info({ 1, 2, 2, 3 }, armnn::DataType::Float32);
1597 
1599  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
1600  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
1601  armnn::IConnectableLayer* const mergeLayer = network->AddMergeLayer(layerName.c_str());
1602  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1603 
1604  inputLayer0->GetOutputSlot(0).Connect(mergeLayer->GetInputSlot(0));
1605  inputLayer1->GetOutputSlot(0).Connect(mergeLayer->GetInputSlot(1));
1606  mergeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1607 
1608  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
1609  inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
1610  mergeLayer->GetOutputSlot(0).SetTensorInfo(info);
1611 
1612  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1613  CHECK(deserializedNetwork);
1614 
1615  LayerVerifierBase verifier(layerName, {info, info}, {info});
1616  deserializedNetwork->ExecuteStrategy(verifier);
1617 }
1618 
1619 class MergerLayerVerifier : public LayerVerifierBaseWithDescriptor<armnn::OriginsDescriptor>
1620 {
1621 public:
1622  MergerLayerVerifier(const std::string& layerName,
1623  const std::vector<armnn::TensorInfo>& inputInfos,
1624  const std::vector<armnn::TensorInfo>& outputInfos,
1625  const armnn::OriginsDescriptor& descriptor)
1626  : LayerVerifierBaseWithDescriptor<armnn::OriginsDescriptor>(layerName, inputInfos, outputInfos, descriptor) {}
1627 
1628  void ExecuteStrategy(const armnn::IConnectableLayer* layer,
1629  const armnn::BaseDescriptor& descriptor,
1630  const std::vector<armnn::ConstTensor>& constants,
1631  const char* name,
1632  const armnn::LayerBindingId id = 0) override
1633  {
1634  armnn::IgnoreUnused(descriptor, constants, id);
1635  switch (layer->GetType())
1636  {
1637  case armnn::LayerType::Input: break;
1638  case armnn::LayerType::Output: break;
1640  {
1641  throw armnn::Exception("MergerLayer should have translated to ConcatLayer");
1642  break;
1643  }
1645  {
1646  VerifyNameAndConnections(layer, name);
1647  const armnn::MergerDescriptor& layerDescriptor =
1648  static_cast<const armnn::MergerDescriptor&>(descriptor);
1649  VerifyDescriptor(layerDescriptor);
1650  break;
1651  }
1652  default:
1653  {
1654  throw armnn::Exception("Unexpected layer type in Merge test model");
1655  }
1656  }
1657  }
1658 };
1659 
TEST_CASE("EnsureMergerLayerBackwardCompatibility")
{
    // The hex data below is a flat buffer containing a simple network with two inputs
    // a merger layer (now deprecated) and an output layer with dimensions as per the tensor infos below.
    //
    // This test verifies that we can still read back these old style
    // models replacing the MergerLayers with ConcatLayers with the same parameters.
    //
    // The blob is an opaque, pre-generated FlatBuffer dump; its bytes must not
    // be edited by hand — regenerate from an old serializer build if needed.
    const std::vector<uint8_t> mergerModel =
    {
        0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00,
        0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
        0x38, 0x02, 0x00, 0x00, 0x8C, 0x01, 0x00, 0x00, 0x70, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x02, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
        0xF4, 0xFD, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B, 0x04, 0x00, 0x00, 0x00, 0x92, 0xFE, 0xFF, 0xFF, 0x04, 0x00,
        0x00, 0x00, 0x9A, 0xFE, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x7E, 0xFE, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00,
        0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0xF8, 0xFE, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x48, 0xFE, 0xFF, 0xFF, 0x00, 0x00,
        0x00, 0x1F, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00,
        0x68, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00,
        0x0C, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
        0x02, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x22, 0xFF, 0xFF, 0xFF, 0x04, 0x00,
        0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x3E, 0xFF, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x36, 0xFF, 0xFF, 0xFF,
        0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x1E, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x1C, 0x00,
        0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x6D, 0x65, 0x72, 0x67, 0x65, 0x72, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
        0x5C, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x34, 0xFF,
        0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x92, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x02, 0x00,
        0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x08, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00,
        0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x00, 0x00,
        0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0E, 0x00,
        0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x06, 0x00, 0x08, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0E, 0x00,
        0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x0E, 0x00, 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x14, 0x00, 0x0E, 0x00, 0x00, 0x00,
        0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
        0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00,
        0x00, 0x00, 0x66, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x04, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00,
        0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09,
        0x04, 0x00, 0x00, 0x00, 0xF6, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x0A, 0x00,
        0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x14, 0x00, 0x00, 0x00,
        0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00,
        0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0A, 0x00, 0x00, 0x00,
        0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x08, 0x00,
        0x07, 0x00, 0x0C, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
        0x02, 0x00, 0x00, 0x00
    };

    // Deserialize the legacy bytes; a failure here means the merger-to-concat
    // translation path regressed.
    armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(std::string(mergerModel.begin(), mergerModel.end()));
    CHECK(deserializedNetwork);

    // Tensor shapes encoded in the blob above: two {2,3,2,2} inputs
    // concatenated along axis 0 into a {4,3,2,2} output.
    const armnn::TensorInfo inputInfo = armnn::TensorInfo({ 2, 3, 2, 2 }, armnn::DataType::Float32);
    const armnn::TensorInfo outputInfo = armnn::TensorInfo({ 4, 3, 2, 2 }, armnn::DataType::Float32);

    const std::vector<armnn::TensorShape> shapes({inputInfo.GetShape(), inputInfo.GetShape()});

    // Rebuild the descriptor the resulting Concat layer is expected to carry.
    armnn::OriginsDescriptor descriptor =
        armnn::CreateDescriptorForConcatenation(shapes.begin(), shapes.end(), 0);

    // MergerLayerVerifier throws if a Merge layer (rather than Concat) is found.
    MergerLayerVerifier verifier("merger", { inputInfo, inputInfo }, { outputInfo }, descriptor);
    deserializedNetwork->ExecuteStrategy(verifier);
}
1728 
1729 TEST_CASE("SerializeConcat")
1730 {
1731  const std::string layerName("concat");
1732  const armnn::TensorInfo inputInfo = armnn::TensorInfo({2, 3, 2, 2}, armnn::DataType::Float32);
1733  const armnn::TensorInfo outputInfo = armnn::TensorInfo({4, 3, 2, 2}, armnn::DataType::Float32);
1734 
1735  const std::vector<armnn::TensorShape> shapes({inputInfo.GetShape(), inputInfo.GetShape()});
1736 
1737  armnn::OriginsDescriptor descriptor =
1738  armnn::CreateDescriptorForConcatenation(shapes.begin(), shapes.end(), 0);
1739 
1741  armnn::IConnectableLayer* const inputLayerOne = network->AddInputLayer(0);
1742  armnn::IConnectableLayer* const inputLayerTwo = network->AddInputLayer(1);
1743  armnn::IConnectableLayer* const concatLayer = network->AddConcatLayer(descriptor, layerName.c_str());
1744  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1745 
1746  inputLayerOne->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(0));
1747  inputLayerTwo->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(1));
1748  concatLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1749 
1750  inputLayerOne->GetOutputSlot(0).SetTensorInfo(inputInfo);
1751  inputLayerTwo->GetOutputSlot(0).SetTensorInfo(inputInfo);
1752  concatLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1753 
1754  std::string concatLayerNetwork = SerializeNetwork(*network);
1755  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(concatLayerNetwork);
1756  CHECK(deserializedNetwork);
1757 
1758  // NOTE: using the MergerLayerVerifier to ensure that it is a concat layer and not a
1759  // merger layer that gets placed into the graph.
1760  MergerLayerVerifier verifier(layerName, {inputInfo, inputInfo}, {outputInfo}, descriptor);
1761  deserializedNetwork->ExecuteStrategy(verifier);
1762 }
1763 
1764 TEST_CASE("SerializeMinimum")
1765 {
1766  const std::string layerName("minimum");
1767  const armnn::TensorInfo info({ 1, 2, 2, 3 }, armnn::DataType::Float32);
1768 
1770  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
1771  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
1772  armnn::IConnectableLayer* const minimumLayer = network->AddMinimumLayer(layerName.c_str());
1773  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1774 
1775  inputLayer0->GetOutputSlot(0).Connect(minimumLayer->GetInputSlot(0));
1776  inputLayer1->GetOutputSlot(0).Connect(minimumLayer->GetInputSlot(1));
1777  minimumLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1778 
1779  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
1780  inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
1781  minimumLayer->GetOutputSlot(0).SetTensorInfo(info);
1782 
1783  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1784  CHECK(deserializedNetwork);
1785 
1786  LayerVerifierBase verifier(layerName, {info, info}, {info});
1787  deserializedNetwork->ExecuteStrategy(verifier);
1788 }
1789 
1790 TEST_CASE("SerializeMultiplication")
1791 {
1792  const std::string layerName("multiplication");
1793  const armnn::TensorInfo info({ 1, 5, 2, 3 }, armnn::DataType::Float32);
1794 
1796  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
1797  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
1798  armnn::IConnectableLayer* const multiplicationLayer = network->AddMultiplicationLayer(layerName.c_str());
1799  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1800 
1801  inputLayer0->GetOutputSlot(0).Connect(multiplicationLayer->GetInputSlot(0));
1802  inputLayer1->GetOutputSlot(0).Connect(multiplicationLayer->GetInputSlot(1));
1803  multiplicationLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1804 
1805  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
1806  inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
1807  multiplicationLayer->GetOutputSlot(0).SetTensorInfo(info);
1808 
1809  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1810  CHECK(deserializedNetwork);
1811 
1812  LayerVerifierBase verifier(layerName, {info, info}, {info});
1813  deserializedNetwork->ExecuteStrategy(verifier);
1814 }
1815 
1816 TEST_CASE("SerializePrelu")
1817 {
1818  const std::string layerName("prelu");
1819 
1820  armnn::TensorInfo inputTensorInfo ({ 4, 1, 2 }, armnn::DataType::Float32);
1821  armnn::TensorInfo alphaTensorInfo ({ 5, 4, 3, 1 }, armnn::DataType::Float32);
1822  armnn::TensorInfo outputTensorInfo({ 5, 4, 3, 2 }, armnn::DataType::Float32);
1823 
1825  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1826  armnn::IConnectableLayer* const alphaLayer = network->AddInputLayer(1);
1827  armnn::IConnectableLayer* const preluLayer = network->AddPreluLayer(layerName.c_str());
1828  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1829 
1830  inputLayer->GetOutputSlot(0).Connect(preluLayer->GetInputSlot(0));
1831  alphaLayer->GetOutputSlot(0).Connect(preluLayer->GetInputSlot(1));
1832  preluLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1833 
1834  inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
1835  alphaLayer->GetOutputSlot(0).SetTensorInfo(alphaTensorInfo);
1836  preluLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1837 
1838  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1839  CHECK(deserializedNetwork);
1840 
1841  LayerVerifierBase verifier(layerName, {inputTensorInfo, alphaTensorInfo}, {outputTensorInfo});
1842  deserializedNetwork->ExecuteStrategy(verifier);
1843 }
1844 
1845 TEST_CASE("SerializeNormalization")
1846 {
1847  const std::string layerName("normalization");
1848  const armnn::TensorInfo info({2, 1, 2, 2}, armnn::DataType::Float32);
1849 
1852  desc.m_NormSize = 3;
1853  desc.m_Alpha = 1;
1854  desc.m_Beta = 1;
1855  desc.m_K = 1;
1856 
1858  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1859  armnn::IConnectableLayer* const normalizationLayer = network->AddNormalizationLayer(desc, layerName.c_str());
1860  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1861 
1862  inputLayer->GetOutputSlot(0).Connect(normalizationLayer->GetInputSlot(0));
1863  normalizationLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1864 
1865  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
1866  normalizationLayer->GetOutputSlot(0).SetTensorInfo(info);
1867 
1868  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1869  CHECK(deserializedNetwork);
1870 
1872  deserializedNetwork->ExecuteStrategy(verifier);
1873 }
1874 
1875 TEST_CASE("SerializePad")
1876 {
1877  const std::string layerName("pad");
1878  const armnn::TensorInfo inputTensorInfo = armnn::TensorInfo({1, 2, 3, 4}, armnn::DataType::Float32);
1879  const armnn::TensorInfo outputTensorInfo = armnn::TensorInfo({1, 3, 5, 7}, armnn::DataType::Float32);
1880 
1881  armnn::PadDescriptor desc({{0, 0}, {1, 0}, {1, 1}, {1, 2}});
1882 
1884  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1885  armnn::IConnectableLayer* const padLayer = network->AddPadLayer(desc, layerName.c_str());
1886  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1887 
1888  inputLayer->GetOutputSlot(0).Connect(padLayer->GetInputSlot(0));
1889  padLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1890 
1891  inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
1892  padLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1893 
1894  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1895  CHECK(deserializedNetwork);
1896 
1898  {inputTensorInfo},
1899  {outputTensorInfo},
1900  desc);
1901  deserializedNetwork->ExecuteStrategy(verifier);
1902 }
1903 
1904 TEST_CASE("SerializePadReflect")
1905 {
1906  const std::string layerName("padReflect");
1907  const armnn::TensorInfo inputTensorInfo = armnn::TensorInfo({1, 2, 3, 4}, armnn::DataType::Float32);
1908  const armnn::TensorInfo outputTensorInfo = armnn::TensorInfo({1, 3, 5, 7}, armnn::DataType::Float32);
1909 
1910  armnn::PadDescriptor desc({{0, 0}, {1, 0}, {1, 1}, {1, 2}});
1912 
1914  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1915  armnn::IConnectableLayer* const padLayer = network->AddPadLayer(desc, layerName.c_str());
1916  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1917 
1918  inputLayer->GetOutputSlot(0).Connect(padLayer->GetInputSlot(0));
1919  padLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1920 
1921  inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
1922  padLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1923 
1924  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1925  CHECK(deserializedNetwork);
1926 
1928  {inputTensorInfo},
1929  {outputTensorInfo},
1930  desc);
1931  deserializedNetwork->ExecuteStrategy(verifier);
1932 }
1933 
TEST_CASE("EnsurePadBackwardCompatibility")
{
    // The PadDescriptor is being extended with a float PadValue (so a value other than 0
    // can be used to pad the tensor.
    //
    // This test contains a binary representation of a simple input->pad->output network
    // prior to this change to test that the descriptor has been updated in a backward
    // compatible way with respect to Deserialization of older binary dumps
    //
    // The blob is an opaque, pre-generated FlatBuffer dump; do not edit the
    // bytes by hand — regenerate from a pre-change serializer build if needed.
    const std::vector<uint8_t> padModel =
    {
        0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00,
        0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
        0x54, 0x01, 0x00, 0x00, 0x6C, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0xD0, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B,
        0x04, 0x00, 0x00, 0x00, 0x96, 0xFF, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x9E, 0xFF, 0xFF, 0xFF, 0x04, 0x00,
        0x00, 0x00, 0x72, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
        0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00,
        0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x2C, 0xFF, 0xFF, 0xFF, 0x01, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x24, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x16, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00,
        0x0E, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x4C, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x06, 0x00, 0x08, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x08, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x0E, 0x00, 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x14, 0x00, 0x0E, 0x00, 0x00, 0x00,
        0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00,
        0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x70, 0x61, 0x64, 0x00, 0x01, 0x00, 0x00, 0x00, 0x48, 0x00, 0x00, 0x00,
        0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00,
        0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x52, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x05, 0x00,
        0x00, 0x00, 0x07, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x04, 0x00, 0x00, 0x00, 0xF6, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x06, 0x00, 0x0A, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x0E, 0x00, 0x14, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x0E, 0x00, 0x00, 0x00,
        0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00,
        0x08, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x0A, 0x00, 0x10, 0x00, 0x08, 0x00, 0x07, 0x00, 0x0C, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
        0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00,
        0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00
    };

    // Deserialize the pre-PadValue bytes; failure means the descriptor change
    // is not backward compatible.
    armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(std::string(padModel.begin(), padModel.end()));
    CHECK(deserializedNetwork);

    // Tensor shapes encoded in the blob above.
    const armnn::TensorInfo inputInfo = armnn::TensorInfo({ 1, 2, 3, 4 }, armnn::DataType::Float32);
    const armnn::TensorInfo outputInfo = armnn::TensorInfo({ 1, 3, 5, 7 }, armnn::DataType::Float32);

    // Expected pad list; the (new) PadValue field should take its default.
    armnn::PadDescriptor descriptor({{ 0, 0 }, { 1, 0 }, { 1, 1 }, { 1, 2 }});

    LayerVerifierBaseWithDescriptor<armnn::PadDescriptor> verifier("pad", { inputInfo }, { outputInfo }, descriptor);
    deserializedNetwork->ExecuteStrategy(verifier);
}
1987 
1988 TEST_CASE("SerializePermute")
1989 {
1990  const std::string layerName("permute");
1991  const armnn::TensorInfo inputTensorInfo({4, 3, 2, 1}, armnn::DataType::Float32);
1992  const armnn::TensorInfo outputTensorInfo({1, 2, 3, 4}, armnn::DataType::Float32);
1993 
1994  armnn::PermuteDescriptor descriptor(armnn::PermutationVector({3, 2, 1, 0}));
1995 
1997  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1998  armnn::IConnectableLayer* const permuteLayer = network->AddPermuteLayer(descriptor, layerName.c_str());
1999  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2000 
2001  inputLayer->GetOutputSlot(0).Connect(permuteLayer->GetInputSlot(0));
2002  permuteLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2003 
2004  inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
2005  permuteLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2006 
2007  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2008  CHECK(deserializedNetwork);
2009 
2011  layerName, {inputTensorInfo}, {outputTensorInfo}, descriptor);
2012  deserializedNetwork->ExecuteStrategy(verifier);
2013 }
2014 
2015 TEST_CASE("SerializePooling2d")
2016 {
2017  const std::string layerName("pooling2d");
2018  const armnn::TensorInfo inputInfo({1, 2, 2, 1}, armnn::DataType::Float32);
2019  const armnn::TensorInfo outputInfo({1, 1, 1, 1}, armnn::DataType::Float32);
2020 
2023  desc.m_PadTop = 0;
2024  desc.m_PadBottom = 0;
2025  desc.m_PadLeft = 0;
2026  desc.m_PadRight = 0;
2027  desc.m_PoolType = armnn::PoolingAlgorithm::Average;
2028  desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
2029  desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
2030  desc.m_PoolHeight = 2;
2031  desc.m_PoolWidth = 2;
2032  desc.m_StrideX = 2;
2033  desc.m_StrideY = 2;
2034 
2036  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2037  armnn::IConnectableLayer* const pooling2dLayer = network->AddPooling2dLayer(desc, layerName.c_str());
2038  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2039 
2040  inputLayer->GetOutputSlot(0).Connect(pooling2dLayer->GetInputSlot(0));
2041  pooling2dLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2042 
2043  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2044  pooling2dLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2045 
2046  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2047  CHECK(deserializedNetwork);
2048 
2050  layerName, {inputInfo}, {outputInfo}, desc);
2051  deserializedNetwork->ExecuteStrategy(verifier);
2052 }
2053 
2054 TEST_CASE("SerializePooling3d")
2055 {
2056  const std::string layerName("pooling3d");
2057  const armnn::TensorInfo inputInfo({1, 1, 2, 2, 2}, armnn::DataType::Float32);
2058  const armnn::TensorInfo outputInfo({1, 1, 1, 1, 1}, armnn::DataType::Float32);
2059 
2062  desc.m_PadFront = 0;
2063  desc.m_PadBack = 0;
2064  desc.m_PadTop = 0;
2065  desc.m_PadBottom = 0;
2066  desc.m_PadLeft = 0;
2067  desc.m_PadRight = 0;
2068  desc.m_PoolType = armnn::PoolingAlgorithm::Average;
2069  desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
2070  desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
2071  desc.m_PoolHeight = 2;
2072  desc.m_PoolWidth = 2;
2073  desc.m_PoolDepth = 2;
2074  desc.m_StrideX = 2;
2075  desc.m_StrideY = 2;
2076  desc.m_StrideZ = 2;
2077 
2079  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2080  armnn::IConnectableLayer* const pooling3dLayer = network->AddPooling3dLayer(desc, layerName.c_str());
2081  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2082 
2083  inputLayer->GetOutputSlot(0).Connect(pooling3dLayer->GetInputSlot(0));
2084  pooling3dLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2085 
2086  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2087  pooling3dLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2088 
2089  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2090  CHECK(deserializedNetwork);
2091 
2093  layerName, {inputInfo}, {outputInfo}, desc);
2094  deserializedNetwork->ExecuteStrategy(verifier);
2095 }
2096 
2097 TEST_CASE("SerializeQuantize")
2098 {
2099  const std::string layerName("quantize");
2100  const armnn::TensorInfo info({ 1, 2, 2, 3 }, armnn::DataType::Float32);
2101 
2103  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2104  armnn::IConnectableLayer* const quantizeLayer = network->AddQuantizeLayer(layerName.c_str());
2105  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2106 
2107  inputLayer->GetOutputSlot(0).Connect(quantizeLayer->GetInputSlot(0));
2108  quantizeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2109 
2110  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
2111  quantizeLayer->GetOutputSlot(0).SetTensorInfo(info);
2112 
2113  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2114  CHECK(deserializedNetwork);
2115 
2116  LayerVerifierBase verifier(layerName, {info}, {info});
2117  deserializedNetwork->ExecuteStrategy(verifier);
2118 }
2119 
2120 TEST_CASE("SerializeRank")
2121 {
2122  const std::string layerName("rank");
2123  const armnn::TensorInfo inputInfo({1, 9}, armnn::DataType::Float32);
2124  const armnn::TensorInfo outputInfo({1}, armnn::DataType::Signed32);
2125 
2127  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2128  armnn::IConnectableLayer* const rankLayer = network->AddRankLayer(layerName.c_str());
2129  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2130 
2131  inputLayer->GetOutputSlot(0).Connect(rankLayer->GetInputSlot(0));
2132  rankLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2133 
2134  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2135  rankLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2136 
2137  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2138  CHECK(deserializedNetwork);
2139 
2140  LayerVerifierBase verifier(layerName, {inputInfo}, {outputInfo});
2141  deserializedNetwork->ExecuteStrategy(verifier);
2142 }
2143 
2144 TEST_CASE("SerializeReduceSum")
2145 {
2146  const std::string layerName("Reduce_Sum");
2147  const armnn::TensorInfo inputInfo({1, 1, 3, 2}, armnn::DataType::Float32);
2148  const armnn::TensorInfo outputInfo({1, 1, 1, 2}, armnn::DataType::Float32);
2149 
2150  armnn::ReduceDescriptor descriptor;
2151  descriptor.m_vAxis = { 2 };
2152  descriptor.m_ReduceOperation = armnn::ReduceOperation::Sum;
2153 
2155  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2156  armnn::IConnectableLayer* const reduceSumLayer = network->AddReduceLayer(descriptor, layerName.c_str());
2157  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2158 
2159  inputLayer->GetOutputSlot(0).Connect(reduceSumLayer->GetInputSlot(0));
2160  reduceSumLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2161 
2162  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2163  reduceSumLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2164 
2165  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2166  CHECK(deserializedNetwork);
2167 
2168  LayerVerifierBaseWithDescriptor<armnn::ReduceDescriptor> verifier(layerName, {inputInfo}, {outputInfo}, descriptor);
2169  deserializedNetwork->ExecuteStrategy(verifier);
2170 }
2171 
2172 TEST_CASE("SerializeReshape")
2173 {
2174  const std::string layerName("reshape");
2175  const armnn::TensorInfo inputInfo({1, 9}, armnn::DataType::Float32);
2176  const armnn::TensorInfo outputInfo({3, 3}, armnn::DataType::Float32);
2177 
2178  armnn::ReshapeDescriptor descriptor({3, 3});
2179 
2181  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2182  armnn::IConnectableLayer* const reshapeLayer = network->AddReshapeLayer(descriptor, layerName.c_str());
2183  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2184 
2185  inputLayer->GetOutputSlot(0).Connect(reshapeLayer->GetInputSlot(0));
2186  reshapeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2187 
2188  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2189  reshapeLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2190 
2191  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2192  CHECK(deserializedNetwork);
2193 
2195  layerName, {inputInfo}, {outputInfo}, descriptor);
2196  deserializedNetwork->ExecuteStrategy(verifier);
2197 }
2198 
2199 TEST_CASE("SerializeResize")
2200 {
2201  const std::string layerName("resize");
2202  const armnn::TensorInfo inputInfo = armnn::TensorInfo({1, 3, 5, 5}, armnn::DataType::Float32);
2203  const armnn::TensorInfo outputInfo = armnn::TensorInfo({1, 3, 2, 4}, armnn::DataType::Float32);
2204 
2206  desc.m_TargetWidth = 4;
2207  desc.m_TargetHeight = 2;
2208  desc.m_Method = armnn::ResizeMethod::NearestNeighbor;
2209  desc.m_AlignCorners = true;
2210  desc.m_HalfPixelCenters = true;
2211 
2213  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2214  armnn::IConnectableLayer* const resizeLayer = network->AddResizeLayer(desc, layerName.c_str());
2215  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2216 
2217  inputLayer->GetOutputSlot(0).Connect(resizeLayer->GetInputSlot(0));
2218  resizeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2219 
2220  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2221  resizeLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2222 
2223  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2224  CHECK(deserializedNetwork);
2225 
2226  LayerVerifierBaseWithDescriptor<armnn::ResizeDescriptor> verifier(layerName, {inputInfo}, {outputInfo}, desc);
2227  deserializedNetwork->ExecuteStrategy(verifier);
2228 }
2229 
2230 class ResizeBilinearLayerVerifier : public LayerVerifierBaseWithDescriptor<armnn::ResizeDescriptor>
2231 {
2232 public:
2233  ResizeBilinearLayerVerifier(const std::string& layerName,
2234  const std::vector<armnn::TensorInfo>& inputInfos,
2235  const std::vector<armnn::TensorInfo>& outputInfos,
2236  const armnn::ResizeDescriptor& descriptor)
2238  layerName, inputInfos, outputInfos, descriptor) {}
2239 
2240  void ExecuteStrategy(const armnn::IConnectableLayer* layer,
2241  const armnn::BaseDescriptor& descriptor,
2242  const std::vector<armnn::ConstTensor>& constants,
2243  const char* name,
2244  const armnn::LayerBindingId id = 0) override
2245  {
2246  armnn::IgnoreUnused(descriptor, constants, id);
2247  switch (layer->GetType())
2248  {
2249  case armnn::LayerType::Input: break;
2250  case armnn::LayerType::Output: break;
2252  {
2253  VerifyNameAndConnections(layer, name);
2254  const armnn::ResizeDescriptor& layerDescriptor =
2255  static_cast<const armnn::ResizeDescriptor&>(descriptor);
2256  CHECK(layerDescriptor.m_Method == armnn::ResizeMethod::Bilinear);
2257  CHECK(layerDescriptor.m_TargetWidth == m_Descriptor.m_TargetWidth);
2258  CHECK(layerDescriptor.m_TargetHeight == m_Descriptor.m_TargetHeight);
2259  CHECK(layerDescriptor.m_DataLayout == m_Descriptor.m_DataLayout);
2260  CHECK(layerDescriptor.m_AlignCorners == m_Descriptor.m_AlignCorners);
2261  CHECK(layerDescriptor.m_HalfPixelCenters == m_Descriptor.m_HalfPixelCenters);
2262  break;
2263  }
2264  default:
2265  {
2266  throw armnn::Exception("Unexpected layer type in test model. ResizeBiliniar "
2267  "should have translated to Resize");
2268  }
2269  }
2270  }
2271 };
2272 
2273 TEST_CASE("SerializeResizeBilinear")
2274 {
2275  const std::string layerName("resizeBilinear");
2276  const armnn::TensorInfo inputInfo = armnn::TensorInfo({1, 3, 5, 5}, armnn::DataType::Float32);
2277  const armnn::TensorInfo outputInfo = armnn::TensorInfo({1, 3, 2, 4}, armnn::DataType::Float32);
2278 
2281  desc.m_TargetWidth = 4u;
2282  desc.m_TargetHeight = 2u;
2283  desc.m_AlignCorners = true;
2284  desc.m_HalfPixelCenters = true;
2285 
2287  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2288  armnn::IConnectableLayer* const resizeLayer = network->AddResizeLayer(desc, layerName.c_str());
2289  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2290 
2291  inputLayer->GetOutputSlot(0).Connect(resizeLayer->GetInputSlot(0));
2292  resizeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2293 
2294  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2295  resizeLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2296 
2297  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2298  CHECK(deserializedNetwork);
2299 
2300  ResizeBilinearLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, desc);
2301  deserializedNetwork->ExecuteStrategy(verifier);
2302 }
2303 
2304 TEST_CASE("EnsureResizeBilinearBackwardCompatibility")
2305 {
2306  // The hex data below is a flat buffer containing a simple network with an input,
2307  // a ResizeBilinearLayer (now deprecated and removed) and an output
2308  //
2309  // This test verifies that we can still deserialize this old-style model by replacing
2310  // the ResizeBilinearLayer with an equivalent ResizeLayer
2311  const std::vector<uint8_t> resizeBilinearModel =
2312  {
2313  0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00,
2314  0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
2315  0x50, 0x01, 0x00, 0x00, 0x74, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
2316  0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0xD4, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B,
2317  0x04, 0x00, 0x00, 0x00, 0xC2, 0xFE, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x08, 0x00,
2318  0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x8A, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00,
2319  0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00,
2320  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
2321  0x38, 0xFF, 0xFF, 0xFF, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x30, 0xFF, 0xFF, 0xFF, 0x00, 0x00,
2322  0x00, 0x1A, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0E, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00,
2323  0x34, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x12, 0x00, 0x08, 0x00, 0x0C, 0x00,
2324  0x07, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
2325  0x00, 0x00, 0x0E, 0x00, 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x14, 0x00, 0x0E, 0x00,
2326  0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x19, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00,
2327  0x20, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x72, 0x65, 0x73, 0x69, 0x7A, 0x65, 0x42, 0x69, 0x6C, 0x69,
2328  0x6E, 0x65, 0x61, 0x72, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x48, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
2329  0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00,
2330  0x00, 0x00, 0x52, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
2331  0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x04, 0x00,
2332  0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
2333  0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
2334  0x00, 0x09, 0x04, 0x00, 0x00, 0x00, 0xF6, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00,
2335  0x0A, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x14, 0x00,
2336  0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,
2337  0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
2338  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0A, 0x00,
2339  0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00,
2340  0x08, 0x00, 0x07, 0x00, 0x0C, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00,
2341  0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x05, 0x00,
2342  0x00, 0x00, 0x05, 0x00, 0x00, 0x00
2343  };
2344 
2345  armnn::INetworkPtr deserializedNetwork =
2346  DeserializeNetwork(std::string(resizeBilinearModel.begin(), resizeBilinearModel.end()));
2347  CHECK(deserializedNetwork);
2348 
2349  const armnn::TensorInfo inputInfo = armnn::TensorInfo({1, 3, 5, 5}, armnn::DataType::Float32);
2350  const armnn::TensorInfo outputInfo = armnn::TensorInfo({1, 3, 2, 4}, armnn::DataType::Float32);
2351 
2352  armnn::ResizeDescriptor descriptor;
2353  descriptor.m_TargetWidth = 4u;
2354  descriptor.m_TargetHeight = 2u;
2355 
2356  ResizeBilinearLayerVerifier verifier("resizeBilinear", { inputInfo }, { outputInfo }, descriptor);
2357  deserializedNetwork->ExecuteStrategy(verifier);
2358 }
2359 
2360 TEST_CASE("SerializeShape")
2361 {
2362  const std::string layerName("shape");
2363  const armnn::TensorInfo inputInfo({1, 3, 3, 1}, armnn::DataType::Signed32);
2364  const armnn::TensorInfo outputInfo({ 4 }, armnn::DataType::Signed32);
2365 
2367  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2368  armnn::IConnectableLayer* const shapeLayer = network->AddShapeLayer(layerName.c_str());
2369  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2370 
2371  inputLayer->GetOutputSlot(0).Connect(shapeLayer->GetInputSlot(0));
2372  shapeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2373 
2374  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2375  shapeLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2376 
2377  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2378  CHECK(deserializedNetwork);
2379 
2380  LayerVerifierBase verifier(layerName, {inputInfo}, {outputInfo});
2381 
2382  deserializedNetwork->ExecuteStrategy(verifier);
2383 }
2384 
2385 TEST_CASE("SerializeSlice")
2386 {
2387  const std::string layerName{"slice"};
2388 
2389  const armnn::TensorInfo inputInfo = armnn::TensorInfo({3, 2, 3, 1}, armnn::DataType::Float32);
2390  const armnn::TensorInfo outputInfo = armnn::TensorInfo({2, 2, 2, 1}, armnn::DataType::Float32);
2391 
2392  armnn::SliceDescriptor descriptor({ 0, 0, 1, 0}, {2, 2, 2, 1});
2393 
2395 
2396  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2397  armnn::IConnectableLayer* const sliceLayer = network->AddSliceLayer(descriptor, layerName.c_str());
2398  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2399 
2400  inputLayer->GetOutputSlot(0).Connect(sliceLayer->GetInputSlot(0));
2401  sliceLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2402 
2403  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2404  sliceLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2405 
2406  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2407  CHECK(deserializedNetwork);
2408 
2409  LayerVerifierBaseWithDescriptor<armnn::SliceDescriptor> verifier(layerName, {inputInfo}, {outputInfo}, descriptor);
2410  deserializedNetwork->ExecuteStrategy(verifier);
2411 }
2412 
2413 TEST_CASE("SerializeSoftmax")
2414 {
2415  const std::string layerName("softmax");
2417 
2418  armnn::SoftmaxDescriptor descriptor;
2419  descriptor.m_Beta = 1.0f;
2420 
2422  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2423  armnn::IConnectableLayer* const softmaxLayer = network->AddSoftmaxLayer(descriptor, layerName.c_str());
2424  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2425 
2426  inputLayer->GetOutputSlot(0).Connect(softmaxLayer->GetInputSlot(0));
2427  softmaxLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2428 
2429  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
2430  softmaxLayer->GetOutputSlot(0).SetTensorInfo(info);
2431 
2432  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2433  CHECK(deserializedNetwork);
2434 
2435  LayerVerifierBaseWithDescriptor<armnn::SoftmaxDescriptor> verifier(layerName, {info}, {info}, descriptor);
2436  deserializedNetwork->ExecuteStrategy(verifier);
2437 }
2438 
2439 TEST_CASE("SerializeSpaceToBatchNd")
2440 {
2441  const std::string layerName("spaceToBatchNd");
2442  const armnn::TensorInfo inputInfo({2, 1, 2, 4}, armnn::DataType::Float32);
2443  const armnn::TensorInfo outputInfo({8, 1, 1, 3}, armnn::DataType::Float32);
2444 
2447  desc.m_BlockShape = {2, 2};
2448  desc.m_PadList = {{0, 0}, {2, 0}};
2449 
2451  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2452  armnn::IConnectableLayer* const spaceToBatchNdLayer = network->AddSpaceToBatchNdLayer(desc, layerName.c_str());
2453  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2454 
2455  inputLayer->GetOutputSlot(0).Connect(spaceToBatchNdLayer->GetInputSlot(0));
2456  spaceToBatchNdLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2457 
2458  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2459  spaceToBatchNdLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2460 
2461  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2462  CHECK(deserializedNetwork);
2463 
2465  layerName, {inputInfo}, {outputInfo}, desc);
2466  deserializedNetwork->ExecuteStrategy(verifier);
2467 }
2468 
2469 TEST_CASE("SerializeSpaceToDepth")
2470 {
2471  const std::string layerName("spaceToDepth");
2472 
2473  const armnn::TensorInfo inputInfo ({ 1, 16, 8, 3 }, armnn::DataType::Float32);
2474  const armnn::TensorInfo outputInfo({ 1, 8, 4, 12 }, armnn::DataType::Float32);
2475 
2477  desc.m_BlockSize = 2;
2478  desc.m_DataLayout = armnn::DataLayout::NHWC;
2479 
2481  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2482  armnn::IConnectableLayer* const spaceToDepthLayer = network->AddSpaceToDepthLayer(desc, layerName.c_str());
2483  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2484 
2485  inputLayer->GetOutputSlot(0).Connect(spaceToDepthLayer->GetInputSlot(0));
2486  spaceToDepthLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2487 
2488  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2489  spaceToDepthLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2490 
2491  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2492  CHECK(deserializedNetwork);
2493 
2495  layerName, {inputInfo}, {outputInfo}, desc);
2496  deserializedNetwork->ExecuteStrategy(verifier);
2497 }
2498 
2499 TEST_CASE("SerializeSplitter")
2500 {
2501  const unsigned int numViews = 3;
2502  const unsigned int numDimensions = 4;
2503  const unsigned int inputShape[] = {1, 18, 4, 4};
2504  const unsigned int outputShape[] = {1, 6, 4, 4};
2505 
2506  // This is modelled on how the caffe parser sets up a splitter layer to partition an input along dimension one.
2507  unsigned int splitterDimSizes[4] = {static_cast<unsigned int>(inputShape[0]),
2508  static_cast<unsigned int>(inputShape[1]),
2509  static_cast<unsigned int>(inputShape[2]),
2510  static_cast<unsigned int>(inputShape[3])};
2511  splitterDimSizes[1] /= numViews;
2512  armnn::ViewsDescriptor desc(numViews, numDimensions);
2513 
2514  for (unsigned int g = 0; g < numViews; ++g)
2515  {
2516  desc.SetViewOriginCoord(g, 1, splitterDimSizes[1] * g);
2517 
2518  for (unsigned int dimIdx=0; dimIdx < 4; dimIdx++)
2519  {
2520  desc.SetViewSize(g, dimIdx, splitterDimSizes[dimIdx]);
2521  }
2522  }
2523 
2524  const std::string layerName("splitter");
2525  const armnn::TensorInfo inputInfo(numDimensions, inputShape, armnn::DataType::Float32);
2526  const armnn::TensorInfo outputInfo(numDimensions, outputShape, armnn::DataType::Float32);
2527 
2529  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2530  armnn::IConnectableLayer* const splitterLayer = network->AddSplitterLayer(desc, layerName.c_str());
2531  armnn::IConnectableLayer* const outputLayer0 = network->AddOutputLayer(0);
2532  armnn::IConnectableLayer* const outputLayer1 = network->AddOutputLayer(1);
2533  armnn::IConnectableLayer* const outputLayer2 = network->AddOutputLayer(2);
2534 
2535  inputLayer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0));
2536  splitterLayer->GetOutputSlot(0).Connect(outputLayer0->GetInputSlot(0));
2537  splitterLayer->GetOutputSlot(1).Connect(outputLayer1->GetInputSlot(0));
2538  splitterLayer->GetOutputSlot(2).Connect(outputLayer2->GetInputSlot(0));
2539 
2540  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2541  splitterLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2542  splitterLayer->GetOutputSlot(1).SetTensorInfo(outputInfo);
2543  splitterLayer->GetOutputSlot(2).SetTensorInfo(outputInfo);
2544 
2545  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2546  CHECK(deserializedNetwork);
2547 
2549  layerName, {inputInfo}, {outputInfo, outputInfo, outputInfo}, desc);
2550  deserializedNetwork->ExecuteStrategy(verifier);
2551 }
2552 
2553 TEST_CASE("SerializeStack")
2554 {
2555  const std::string layerName("stack");
2556 
2557  armnn::TensorInfo inputTensorInfo ({4, 3, 5}, armnn::DataType::Float32);
2558  armnn::TensorInfo outputTensorInfo({4, 3, 2, 5}, armnn::DataType::Float32);
2559 
2560  armnn::StackDescriptor descriptor(2, 2, {4, 3, 5});
2561 
2563  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(0);
2564  armnn::IConnectableLayer* const inputLayer2 = network->AddInputLayer(1);
2565  armnn::IConnectableLayer* const stackLayer = network->AddStackLayer(descriptor, layerName.c_str());
2566  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2567 
2568  inputLayer1->GetOutputSlot(0).Connect(stackLayer->GetInputSlot(0));
2569  inputLayer2->GetOutputSlot(0).Connect(stackLayer->GetInputSlot(1));
2570  stackLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2571 
2572  inputLayer1->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
2573  inputLayer2->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
2574  stackLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2575 
2576  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2577  CHECK(deserializedNetwork);
2578 
2580  layerName, {inputTensorInfo, inputTensorInfo}, {outputTensorInfo}, descriptor);
2581  deserializedNetwork->ExecuteStrategy(verifier);
2582 }
2583 
2584 TEST_CASE("SerializeStandIn")
2585 {
2586  const std::string layerName("standIn");
2587 
2588  armnn::TensorInfo tensorInfo({ 1u }, armnn::DataType::Float32);
2589  armnn::StandInDescriptor descriptor(2u, 2u);
2590 
2592  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
2593  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
2594  armnn::IConnectableLayer* const standInLayer = network->AddStandInLayer(descriptor, layerName.c_str());
2595  armnn::IConnectableLayer* const outputLayer0 = network->AddOutputLayer(0);
2596  armnn::IConnectableLayer* const outputLayer1 = network->AddOutputLayer(1);
2597 
2598  inputLayer0->GetOutputSlot(0).Connect(standInLayer->GetInputSlot(0));
2599  inputLayer0->GetOutputSlot(0).SetTensorInfo(tensorInfo);
2600 
2601  inputLayer1->GetOutputSlot(0).Connect(standInLayer->GetInputSlot(1));
2602  inputLayer1->GetOutputSlot(0).SetTensorInfo(tensorInfo);
2603 
2604  standInLayer->GetOutputSlot(0).Connect(outputLayer0->GetInputSlot(0));
2605  standInLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
2606 
2607  standInLayer->GetOutputSlot(1).Connect(outputLayer1->GetInputSlot(0));
2608  standInLayer->GetOutputSlot(1).SetTensorInfo(tensorInfo);
2609 
2610  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2611  CHECK(deserializedNetwork);
2612 
2614  layerName, { tensorInfo, tensorInfo }, { tensorInfo, tensorInfo }, descriptor);
2615  deserializedNetwork->ExecuteStrategy(verifier);
2616 }
2617 
2618 TEST_CASE("SerializeStridedSlice")
2619 {
2620  const std::string layerName("stridedSlice");
2621  const armnn::TensorInfo inputInfo = armnn::TensorInfo({3, 2, 3, 1}, armnn::DataType::Float32);
2622  const armnn::TensorInfo outputInfo = armnn::TensorInfo({3, 1}, armnn::DataType::Float32);
2623 
2624  armnn::StridedSliceDescriptor desc({0, 0, 1, 0}, {1, 1, 1, 1}, {1, 1, 1, 1});
2625  desc.m_EndMask = (1 << 4) - 1;
2626  desc.m_ShrinkAxisMask = (1 << 1) | (1 << 2);
2627  desc.m_DataLayout = armnn::DataLayout::NCHW;
2628 
2630  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2631  armnn::IConnectableLayer* const stridedSliceLayer = network->AddStridedSliceLayer(desc, layerName.c_str());
2632  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2633 
2634  inputLayer->GetOutputSlot(0).Connect(stridedSliceLayer->GetInputSlot(0));
2635  stridedSliceLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2636 
2637  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2638  stridedSliceLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2639 
2640  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2641  CHECK(deserializedNetwork);
2642 
2644  layerName, {inputInfo}, {outputInfo}, desc);
2645  deserializedNetwork->ExecuteStrategy(verifier);
2646 }
2647 
2648 TEST_CASE("SerializeSubtraction")
2649 {
2650  const std::string layerName("subtraction");
2652 
2654  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
2655  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
2656  armnn::IConnectableLayer* const subtractionLayer = network->AddSubtractionLayer(layerName.c_str());
2657  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2658 
2659  inputLayer0->GetOutputSlot(0).Connect(subtractionLayer->GetInputSlot(0));
2660  inputLayer1->GetOutputSlot(0).Connect(subtractionLayer->GetInputSlot(1));
2661  subtractionLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2662 
2663  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
2664  inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
2665  subtractionLayer->GetOutputSlot(0).SetTensorInfo(info);
2666 
2667  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2668  CHECK(deserializedNetwork);
2669 
2670  LayerVerifierBase verifier(layerName, {info, info}, {info});
2671  deserializedNetwork->ExecuteStrategy(verifier);
2672 }
2673 
2674 TEST_CASE("SerializeSwitch")
2675 {
2676  class SwitchLayerVerifier : public LayerVerifierBase
2677  {
2678  public:
2679  SwitchLayerVerifier(const std::string& layerName,
2680  const std::vector<armnn::TensorInfo>& inputInfos,
2681  const std::vector<armnn::TensorInfo>& outputInfos)
2682  : LayerVerifierBase(layerName, inputInfos, outputInfos) {}
2683 
2684  void ExecuteStrategy(const armnn::IConnectableLayer* layer,
2685  const armnn::BaseDescriptor& descriptor,
2686  const std::vector<armnn::ConstTensor>& constants,
2687  const char* name,
2688  const armnn::LayerBindingId id = 0) override
2689  {
2690  armnn::IgnoreUnused(descriptor, constants, id);
2691  switch (layer->GetType())
2692  {
2693  case armnn::LayerType::Input: break;
2694  case armnn::LayerType::Output: break;
2695  case armnn::LayerType::Constant: break;
2697  {
2698  VerifyNameAndConnections(layer, name);
2699  break;
2700  }
2701  default:
2702  {
2703  throw armnn::Exception("Unexpected layer type in Switch test model");
2704  }
2705  }
2706  }
2707  };
2708 
2709  const std::string layerName("switch");
2710  const armnn::TensorInfo info({ 1, 4 }, armnn::DataType::Float32, 0.0f, 0, true);
2711 
2712  std::vector<float> constantData = GenerateRandomData<float>(info.GetNumElements());
2713  armnn::ConstTensor constTensor(info, constantData);
2714 
2716  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2717  armnn::IConnectableLayer* const constantLayer = network->AddConstantLayer(constTensor, "constant");
2718  armnn::IConnectableLayer* const switchLayer = network->AddSwitchLayer(layerName.c_str());
2719  armnn::IConnectableLayer* const trueOutputLayer = network->AddOutputLayer(0);
2720  armnn::IConnectableLayer* const falseOutputLayer = network->AddOutputLayer(1);
2721 
2722  inputLayer->GetOutputSlot(0).Connect(switchLayer->GetInputSlot(0));
2723  constantLayer->GetOutputSlot(0).Connect(switchLayer->GetInputSlot(1));
2724  switchLayer->GetOutputSlot(0).Connect(trueOutputLayer->GetInputSlot(0));
2725  switchLayer->GetOutputSlot(1).Connect(falseOutputLayer->GetInputSlot(0));
2726 
2727  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
2728  constantLayer->GetOutputSlot(0).SetTensorInfo(info);
2729  switchLayer->GetOutputSlot(0).SetTensorInfo(info);
2730  switchLayer->GetOutputSlot(1).SetTensorInfo(info);
2731 
2732  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2733  CHECK(deserializedNetwork);
2734 
2735  SwitchLayerVerifier verifier(layerName, {info, info}, {info, info});
2736  deserializedNetwork->ExecuteStrategy(verifier);
2737 }
2738 
2739 TEST_CASE("SerializeTranspose")
2740 {
2741  const std::string layerName("transpose");
2742  const armnn::TensorInfo inputTensorInfo({4, 3, 2, 1}, armnn::DataType::Float32);
2743  const armnn::TensorInfo outputTensorInfo({1, 2, 3, 4}, armnn::DataType::Float32);
2744 
2745  armnn::TransposeDescriptor descriptor(armnn::PermutationVector({3, 2, 1, 0}));
2746 
2748  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2749  armnn::IConnectableLayer* const transposeLayer = network->AddTransposeLayer(descriptor, layerName.c_str());
2750  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2751 
2752  inputLayer->GetOutputSlot(0).Connect(transposeLayer->GetInputSlot(0));
2753  transposeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2754 
2755  inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
2756  transposeLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2757 
2758  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2759  CHECK(deserializedNetwork);
2760 
2762  layerName, {inputTensorInfo}, {outputTensorInfo}, descriptor);
2763  deserializedNetwork->ExecuteStrategy(verifier);
2764 }
2765 
2766 TEST_CASE("SerializeTransposeConvolution2d")
2767 {
2768  const std::string layerName("transposeConvolution2d");
2769  const armnn::TensorInfo inputInfo ({ 1, 7, 7, 1 }, armnn::DataType::Float32);
2770  const armnn::TensorInfo outputInfo({ 1, 9, 9, 1 }, armnn::DataType::Float32);
2771 
2772  const armnn::TensorInfo weightsInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32, 0.0f, 0, true);
2773  const armnn::TensorInfo biasesInfo ({ 1 }, armnn::DataType::Float32, 0.0f, 0, true);
2774 
2775  std::vector<float> weightsData = GenerateRandomData<float>(weightsInfo.GetNumElements());
2776  armnn::ConstTensor weights(weightsInfo, weightsData);
2777 
2778  std::vector<float> biasesData = GenerateRandomData<float>(biasesInfo.GetNumElements());
2779  armnn::ConstTensor biases(biasesInfo, biasesData);
2780 
2782  descriptor.m_PadLeft = 1;
2783  descriptor.m_PadRight = 1;
2784  descriptor.m_PadTop = 1;
2785  descriptor.m_PadBottom = 1;
2786  descriptor.m_StrideX = 1;
2787  descriptor.m_StrideY = 1;
2788  descriptor.m_BiasEnabled = true;
2790 
2792  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2793  armnn::IConnectableLayer* const convLayer =
2794  network->AddTransposeConvolution2dLayer(descriptor,
2795  weights,
2797  layerName.c_str());
2798  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2799 
2800  inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
2801  convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2802 
2803  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2804  convLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2805 
2806  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2807  CHECK(deserializedNetwork);
2808 
2809  const std::vector<armnn::ConstTensor> constants {weights, biases};
2811  layerName, {inputInfo}, {outputInfo}, descriptor, constants);
2812  deserializedNetwork->ExecuteStrategy(verifier);
2813 }
2814 
2815 TEST_CASE("SerializeDeserializeNonLinearNetwork")
2816 {
2817  class ConstantLayerVerifier : public LayerVerifierBase
2818  {
2819  public:
2820  ConstantLayerVerifier(const std::string& layerName,
2821  const std::vector<armnn::TensorInfo>& inputInfos,
2822  const std::vector<armnn::TensorInfo>& outputInfos,
2823  const armnn::ConstTensor& layerInput)
2824  : LayerVerifierBase(layerName, inputInfos, outputInfos)
2825  , m_LayerInput(layerInput) {}
2826 
2827  void ExecuteStrategy(const armnn::IConnectableLayer* layer,
2828  const armnn::BaseDescriptor& descriptor,
2829  const std::vector<armnn::ConstTensor>& constants,
2830  const char* name,
2831  const armnn::LayerBindingId id = 0) override
2832  {
2833  armnn::IgnoreUnused(descriptor, constants, id);
2834  switch (layer->GetType())
2835  {
2836  case armnn::LayerType::Input: break;
2837  case armnn::LayerType::Output: break;
2838  case armnn::LayerType::Addition: break;
2840  {
2841  VerifyNameAndConnections(layer, name);
2842  CompareConstTensor(constants.at(0), m_LayerInput);
2843  break;
2844  }
2845  default:
2846  {
2847  throw armnn::Exception("Unexpected layer type in test model");
2848  }
2849  }
2850  }
2851 
2852  private:
2853  armnn::ConstTensor m_LayerInput;
2854  };
2855 
2856  const std::string layerName("constant");
2857  const armnn::TensorInfo info({ 2, 3 }, armnn::DataType::Float32, 0.0f, 0, true);
2858 
2859  std::vector<float> constantData = GenerateRandomData<float>(info.GetNumElements());
2860  armnn::ConstTensor constTensor(info, constantData);
2861 
2863  armnn::IConnectableLayer* input = network->AddInputLayer(0);
2864  armnn::IConnectableLayer* add = network->AddAdditionLayer();
2865  armnn::IConnectableLayer* constant = network->AddConstantLayer(constTensor, layerName.c_str());
2866  armnn::IConnectableLayer* output = network->AddOutputLayer(0);
2867 
2868  input->GetOutputSlot(0).Connect(add->GetInputSlot(0));
2869  constant->GetOutputSlot(0).Connect(add->GetInputSlot(1));
2870  add->GetOutputSlot(0).Connect(output->GetInputSlot(0));
2871 
2872  input->GetOutputSlot(0).SetTensorInfo(info);
2873  constant->GetOutputSlot(0).SetTensorInfo(info);
2874  add->GetOutputSlot(0).SetTensorInfo(info);
2875 
2876  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2877  CHECK(deserializedNetwork);
2878 
2879  ConstantLayerVerifier verifier(layerName, {}, {info}, constTensor);
2880  deserializedNetwork->ExecuteStrategy(verifier);
2881 }
2882 
2883 }
uint32_t m_PadBottom
Padding bottom value in the height dimension.
bool m_BiasEnabled
Enable/disable bias.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
A ViewsDescriptor for the SplitterLayer.
Interface for a layer that is connectable to other layers via InputSlots and OutputSlots.
Definition: INetwork.hpp:68
float m_ScaleW
Center size encoding scale weight.
bool m_BiasEnabled
Enable/disable bias.
A TransposeConvolution2dDescriptor for the TransposeConvolution2dLayer.
const TensorShape & GetShape() const
Definition: Tensor.hpp:191
uint32_t m_PadBottom
Padding bottom value in the height dimension.
A ReshapeDescriptor for the ReshapeLayer.
armnn::INetworkPtr DeserializeNetwork(const std::string &serializerString)
uint32_t m_PadBack
Padding back value in the depth dimension.
#define ARMNN_NO_DEPRECATE_WARN_BEGIN
Definition: Deprecated.hpp:33
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
A ComparisonDescriptor for the ComparisonLayer.
Definition: Descriptors.hpp:89
float m_ScaleX
Center size encoding scale x.
bool m_TransposeWeightMatrix
Enable/disable transpose weight matrix.
uint32_t m_StrideX
Stride value when proceeding through input for the width dimension.
A Convolution2dDescriptor for the Convolution2dLayer.
uint32_t m_PadLeft
Padding left value in the width dimension.
bool m_BiasEnabled
Enable/disable bias.
ResizeMethod m_Method
The Interpolation method to use (Bilinear, NearestNeighbor).
float m_Gamma
Gamma, the scale scalar value applied for the normalized tensor. Defaults to 1.0. ...
float m_Beta
Exponentiation value.
The padding fields don't count and are ignored.
float m_Eps
Value to add to the variance. Used to avoid dividing by zero.
ArgMinMaxFunction m_Function
Specify if the function is to find Min or Max.
Definition: Descriptors.hpp:81
uint32_t m_DetectionsPerClass
Detections per classes, used in Regular NMS.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
uint32_t m_PadBottom
Padding bottom value in the height dimension.
bool m_BiasEnabled
Enable/disable bias.
A LogicalBinaryDescriptor for the LogicalBinaryLayer.
uint32_t m_PadRight
Padding right value in the width dimension.
void ExecuteStrategy(const armnn::IConnectableLayer *layer, const armnn::BaseDescriptor &descriptor, const std::vector< armnn::ConstTensor > &constants, const char *name, const armnn::LayerBindingId id=0) override
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
Copyright (c) 2021 ARM Limited and Contributors.
DataLayout m_DataLayout
The data layout to be used (NCDHW, NDHWC).
void IgnoreUnused(Ts &&...)
uint32_t m_PadBottom
Padding bottom value in the height dimension.
uint32_t m_DilationY
Dilation along y axis.
int32_t m_EndMask
End mask value.
A SpaceToDepthDescriptor for the SpaceToDepthLayer.
uint32_t m_DilationX
Dilation along x axis.
uint32_t m_DilationY
Dilation factor value for height dimension.
A BatchToSpaceNdDescriptor for the BatchToSpaceNdLayer.
int LayerBindingId
Type of identifiers for bindable layers (inputs, outputs).
Definition: Types.hpp:290
virtual void SetTensorInfo(const TensorInfo &tensorInfo)=0
uint32_t m_StrideY
Stride value when proceeding through input for the height dimension.
A ResizeBilinearDescriptor for the ResizeBilinearLayer.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
uint32_t m_MaxClassesPerDetection
Maximum numbers of classes per detection, used in Fast NMS.
Base class for all descriptors.
Definition: Descriptors.hpp:22
std::vector< unsigned int > m_Axis
Values for the dimensions to reduce.
A StackDescriptor for the StackLayer.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
constexpr char const * GetUnaryOperationAsCString(UnaryOperation operation)
Definition: TypesUtils.hpp:71
uint32_t m_PadTop
Padding top value in the height dimension.
uint32_t m_MaxDetections
Maximum numbers of detections.
A PadDescriptor for the PadLayer.
uint32_t m_StrideX
Stride value when proceeding through input for the width dimension.
uint32_t m_StrideX
Stride value when proceeding through input for the width dimension.
DataType
Definition: Types.hpp:48
float m_NmsIouThreshold
Intersection over union threshold.
#define ARMNN_NO_DEPRECATE_WARN_END
Definition: Deprecated.hpp:34
uint32_t m_DilationX
Dilation factor value for width dimension.
uint32_t m_PadTop
Padding top value in the height dimension.
Status SetViewSize(uint32_t view, uint32_t coord, uint32_t value)
Set the size of the views.
void ExecuteStrategy(const armnn::IConnectableLayer *layer, const armnn::BaseDescriptor &descriptor, const std::vector< armnn::ConstTensor > &constants, const char *name, const armnn::LayerBindingId id=0) override
A L2NormalizationDescriptor for the L2NormalizationLayer.
void VerifyNameAndConnections(const armnn::IConnectableLayer *layer, const char *name)
An ArgMinMaxDescriptor for ArgMinMaxLayer.
Definition: Descriptors.hpp:67
An OriginsDescriptor for the ConcatLayer.
A ReduceDescriptor for the REDUCE operators.
A FullyConnectedDescriptor for the FullyConnectedLayer.
bool m_BiasEnabled
Enable/disable bias.
A tensor defined by a TensorInfo (shape and data type) and an immutable backing store.
Definition: Tensor.hpp:327
uint32_t m_TargetWidth
Target width value.
A GatherDescriptor for the GatherLayer.
uint32_t m_NumClasses
Number of classes.
bool m_HalfPixelCenters
Half Pixel Centers.
uint32_t m_PadTop
Padding top value in the height dimension.
void SetQuantizationScale(float scale)
Definition: Tensor.cpp:473
A StandInDescriptor for the StandIn layer.
LayerVerifierBase(const std::string &layerName, const std::vector< armnn::TensorInfo > &inputInfos, const std::vector< armnn::TensorInfo > &outputInfos)
bool m_UseRegularNms
Use Regular NMS.
uint32_t m_PadFront
Padding front value in the depth dimension.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
uint32_t m_PadLeft
Padding left value in the width dimension.
uint32_t m_TargetHeight
Target height value.
A SliceDescriptor for the SliceLayer.
uint32_t m_StrideY
Stride value when proceeding through input for the height dimension.
A Convolution3dDescriptor for the Convolution3dLayer.
uint32_t m_PadRight
Padding right value in the width dimension.
virtual LayerType GetType() const =0
Returns the armnn::LayerType of this layer.
unsigned int m_BlockSize
Scalar specifying the input block size. It must be >= 1.
PaddingMode m_PaddingMode
Specifies the Padding mode (Constant, Reflect or Symmetric)
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
A Pooling3dDescriptor for the Pooling3dLayer.
std::vector< uint32_t > m_vAxis
The indices of the dimensions to reduce.
float m_ScaleH
Center size encoding scale height.
A SpaceToBatchNdDescriptor for the SpaceToBatchNdLayer.
DataLayout m_DataLayout
The data layout to be used (NDHWC, NCDHW).
uint32_t m_DilationX
Dilation along x axis.
uint32_t m_PadLeft
Padding left value in the width dimension.
EmptyOptional is used to initialize the Optional class in case we want to have default value for an O...
Definition: Optional.hpp:32
bool m_AlignCorners
Aligned corners.
uint32_t m_StrideX
Stride value when proceeding through input for the width dimension.
int32_t m_Axis
The axis in params to gather indices from.
A ElementwiseUnaryDescriptor for the ElementwiseUnaryLayer.
uint32_t m_StrideY
Stride value when proceeding through input for the height dimension.
uint32_t m_StrideY
Stride value when proceeding through input for the height dimension.
TEST_SUITE("SerializerTests")
Base class for all ArmNN exceptions so that users can filter to just those.
Definition: Exceptions.hpp:46
uint32_t m_PadTop
Padding top value in the height dimension.
void CompareConstTensor(const armnn::ConstTensor &tensor1, const armnn::ConstTensor &tensor2)
virtual const IInputSlot & GetInputSlot(unsigned int index) const =0
Get a const input slot handle by slot index.
A MeanDescriptor for the MeanLayer.
UnaryOperation
Definition: Types.hpp:124
uint32_t m_PadRight
Padding right value in the width dimension.
A TransposeDescriptor for the TransposeLayer.
A StridedSliceDescriptor for the StridedSliceLayer.
virtual const IOutputSlot & GetOutputSlot(unsigned int index) const =0
Get the const output slot handle by slot index.
float m_ScaleY
Center size encoding scale y.
OriginsDescriptor CreateDescriptorForConcatenation(TensorShapeIt first, TensorShapeIt last, unsigned int concatenationDimension)
Convenience template to create an OriginsDescriptor to use when creating a ConcatLayer for performing...
float m_NmsScoreThreshold
NMS score threshold.
std::unique_ptr< INetwork, void(*)(INetwork *network)> INetworkPtr
Definition: INetwork.hpp:238
virtual int Connect(IInputSlot &destination)=0
A Pooling2dDescriptor for the Pooling2dLayer.
std::string SerializeNetwork(const armnn::INetwork &network)
A NormalizationDescriptor for the NormalizationLayer.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
An InstanceNormalizationDescriptor for InstanceNormalizationLayer.
A ChannelShuffleDescriptor for the ChannelShuffle operator.
static INetworkPtr Create(NetworkOptions networkOptions={})
Definition: Network.cpp:475
uint32_t m_DilationZ
Dilation along z axis.
A SoftmaxDescriptor for the SoftmaxLayer.
uint32_t m_StrideZ
Stride value when proceeding through input for the depth dimension.
Status SetViewOriginCoord(uint32_t view, uint32_t coord, uint32_t value)
Set the view origin coordinates.
A DepthwiseConvolution2dDescriptor for the DepthwiseConvolution2dLayer.
uint32_t m_DilationY
Dilation along y axis.
A FillDescriptor for the FillLayer.
A BatchNormalizationDescriptor for the BatchNormalizationLayer.
uint32_t m_PadLeft
Padding left value in the width dimension.
void ExecuteStrategy(const armnn::IConnectableLayer *layer, const armnn::BaseDescriptor &descriptor, const std::vector< armnn::ConstTensor > &constants, const char *name, const armnn::LayerBindingId id=0) override
A PermuteDescriptor for the PermuteLayer.
uint32_t m_PadRight
Padding right value in the width dimension.
bool m_ConstantWeights
Enable/disable constant weights and biases.