ArmNN 22.05
SerializerTests.cpp File Reference
#include "../Serializer.hpp"
#include "SerializerTestUtils.hpp"
#include <armnn/Descriptors.hpp>
#include <armnn/INetwork.hpp>
#include <armnn/TypesUtils.hpp>
#include <armnn/LstmParams.hpp>
#include <armnn/QuantizedLstmParams.hpp>
#include <armnnDeserializer/IDeserializer.hpp>
#include <armnn/utility/IgnoreUnused.hpp>
#include <random>
#include <vector>
#include <doctest/doctest.h>


Functions

 TEST_SUITE ("SerializerTests")
 

Function Documentation

◆ TEST_SUITE()

TEST_SUITE ( "SerializerTests"  )

Definition at line 24 of file SerializerTests.cpp.

References armnn::Addition, ARMNN_NO_DEPRECATE_WARN_BEGIN, ARMNN_NO_DEPRECATE_WARN_END, armnn::Average, armnn::Bilinear, armnn::Boolean, CompareConstTensor(), armnn::Concat, IOutputSlot::Connect(), armnn::Constant, INetwork::Create(), armnn::CreateDescriptorForConcatenation(), DeserializeNetwork(), armnn::Equal, armnn::Exclude, LayerVerifierBase::ExecuteStrategy(), LayerVerifierBaseWithDescriptor< Descriptor >::ExecuteStrategy(), LayerVerifierBaseWithDescriptorAndConstants< Descriptor >::ExecuteStrategy(), armnn::Float32, armnn::Floor, IConnectableLayer::GetInputSlot(), IConnectableLayer::GetOutputSlot(), TensorInfo::GetShape(), IConnectableLayer::GetType(), armnn::GetUnaryOperationAsCString(), armnn::Greater, armnn::IgnoreUnused(), armnn::info, armnn::Input, LayerVerifierBase::LayerVerifierBase(), armnn::LogicalAnd, ResizeDescriptor::m_AlignCorners, GatherDescriptor::m_Axis, MeanDescriptor::m_Axis, SoftmaxDescriptor::m_Beta, FullyConnectedDescriptor::m_BiasEnabled, Convolution2dDescriptor::m_BiasEnabled, Convolution3dDescriptor::m_BiasEnabled, DepthwiseConvolution2dDescriptor::m_BiasEnabled, TransposeConvolution2dDescriptor::m_BiasEnabled, SpaceToDepthDescriptor::m_BlockSize, FullyConnectedDescriptor::m_ConstantWeights, Pooling2dDescriptor::m_DataLayout, Pooling3dDescriptor::m_DataLayout, Convolution2dDescriptor::m_DataLayout, Convolution3dDescriptor::m_DataLayout, DepthwiseConvolution2dDescriptor::m_DataLayout, NormalizationDescriptor::m_DataLayout, L2NormalizationDescriptor::m_DataLayout, BatchToSpaceNdDescriptor::m_DataLayout, ResizeDescriptor::m_DataLayout, SpaceToBatchNdDescriptor::m_DataLayout, TransposeConvolution2dDescriptor::m_DataLayout, DetectionPostProcessDescriptor::m_DetectionsPerClass, Convolution2dDescriptor::m_DilationX, Convolution3dDescriptor::m_DilationX, DepthwiseConvolution2dDescriptor::m_DilationX, Convolution2dDescriptor::m_DilationY, Convolution3dDescriptor::m_DilationY, DepthwiseConvolution2dDescriptor::m_DilationY, Convolution3dDescriptor::m_DilationZ, StridedSliceDescriptor::m_EndMask, BatchNormalizationDescriptor::m_Eps, ArgMinMaxDescriptor::m_Function, InstanceNormalizationDescriptor::m_Gamma, ResizeDescriptor::m_HalfPixelCenters, DetectionPostProcessDescriptor::m_MaxClassesPerDetection, DetectionPostProcessDescriptor::m_MaxDetections, ResizeDescriptor::m_Method, DetectionPostProcessDescriptor::m_NmsIouThreshold, DetectionPostProcessDescriptor::m_NmsScoreThreshold, DetectionPostProcessDescriptor::m_NumClasses, Convolution3dDescriptor::m_PadBack, Convolution2dDescriptor::m_PadBottom, Convolution3dDescriptor::m_PadBottom, DepthwiseConvolution2dDescriptor::m_PadBottom, TransposeConvolution2dDescriptor::m_PadBottom, PadDescriptor::m_PaddingMode, Convolution3dDescriptor::m_PadFront, Convolution2dDescriptor::m_PadLeft, Convolution3dDescriptor::m_PadLeft, DepthwiseConvolution2dDescriptor::m_PadLeft, TransposeConvolution2dDescriptor::m_PadLeft, Convolution2dDescriptor::m_PadRight, Convolution3dDescriptor::m_PadRight, DepthwiseConvolution2dDescriptor::m_PadRight, TransposeConvolution2dDescriptor::m_PadRight, Convolution2dDescriptor::m_PadTop, Convolution3dDescriptor::m_PadTop, DepthwiseConvolution2dDescriptor::m_PadTop, TransposeConvolution2dDescriptor::m_PadTop, DetectionPostProcessDescriptor::m_ScaleH, DetectionPostProcessDescriptor::m_ScaleW, DetectionPostProcessDescriptor::m_ScaleX, DetectionPostProcessDescriptor::m_ScaleY, Convolution2dDescriptor::m_StrideX, Convolution3dDescriptor::m_StrideX, DepthwiseConvolution2dDescriptor::m_StrideX, 
TransposeConvolution2dDescriptor::m_StrideX, Convolution2dDescriptor::m_StrideY, Convolution3dDescriptor::m_StrideY, DepthwiseConvolution2dDescriptor::m_StrideY, TransposeConvolution2dDescriptor::m_StrideY, Convolution3dDescriptor::m_StrideZ, ResizeDescriptor::m_TargetHeight, ResizeDescriptor::m_TargetWidth, FullyConnectedDescriptor::m_TransposeWeightMatrix, DetectionPostProcessDescriptor::m_UseRegularNms, ReduceDescriptor::m_vAxis, armnn::Max, armnn::Merge, armnn::NCHW, armnn::NDHWC, armnn::NearestNeighbor, armnn::NHWC, armnn::NotEqual, armnn::Output, armnn::QAsymmU8, armnn::Reflect, armnn::Resize, SerializeNetwork(), TensorInfo::SetQuantizationScale(), IOutputSlot::SetTensorInfo(), ViewsDescriptor::SetViewOriginCoord(), ViewsDescriptor::SetViewSize(), armnn::Signed32, armnn::Signed64, armnn::Sum, armnn::Switch, and LayerVerifierBase::VerifyNameAndConnections().
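
Every test case registered by this TEST_SUITE follows the same round-trip pattern: build a small INetwork containing the layer under test, serialize it with the SerializeNetwork() helper, deserialize the result with DeserializeNetwork(), and then walk the deserialized graph with a LayerVerifierBase strategy that checks the layer name, connections and tensor infos. The minimal sketch below illustrates that pattern; it assumes only the SerializeNetwork()/DeserializeNetwork() helpers and LayerVerifierBase declared in SerializerTestUtils.hpp, and the activation layer, names and shapes are illustrative rather than taken from SerializerTests.cpp.

// Illustrative sketch of the round-trip pattern shared by the tests below;
// the layer, names and shapes are placeholders, not part of the file itself.
TEST_CASE("SerializeRoundTripSketch")
{
    const std::string layerName("activation");
    const armnn::TensorInfo info({1, 2, 3}, armnn::DataType::Float32);

    // 1. Build a small network containing the layer under test.
    armnn::INetworkPtr network = armnn::INetwork::Create();
    armnn::IConnectableLayer* const inputLayer  = network->AddInputLayer(0);
    armnn::IConnectableLayer* const testedLayer = network->AddActivationLayer(armnn::ActivationDescriptor(),
                                                                              layerName.c_str());
    armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);

    inputLayer->GetOutputSlot(0).Connect(testedLayer->GetInputSlot(0));
    testedLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
    inputLayer->GetOutputSlot(0).SetTensorInfo(info);
    testedLayer->GetOutputSlot(0).SetTensorInfo(info);

    // 2. Serialize to a string and deserialize it back.
    armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
    CHECK(deserializedNetwork);

    // 3. Verify names, connections and tensor infos on the deserialized graph.
    LayerVerifierBase verifier(layerName, {info}, {info});
    deserializedNetwork->ExecuteStrategy(verifier);
}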

25 {
26 
27 TEST_CASE("SerializeAddition")
28 {
29  const std::string layerName("addition");
30  const armnn::TensorInfo tensorInfo({1, 2, 3}, armnn::DataType::Float32);
31 
32  armnn::INetworkPtr network = armnn::INetwork::Create();
33  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
34  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
35  armnn::IConnectableLayer* const additionLayer = network->AddAdditionLayer(layerName.c_str());
36  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
37 
38  inputLayer0->GetOutputSlot(0).Connect(additionLayer->GetInputSlot(0));
39  inputLayer1->GetOutputSlot(0).Connect(additionLayer->GetInputSlot(1));
40  additionLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
41 
42  inputLayer0->GetOutputSlot(0).SetTensorInfo(tensorInfo);
43  inputLayer1->GetOutputSlot(0).SetTensorInfo(tensorInfo);
44  additionLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
45 
46  std::string serializedNetwork = SerializeNetwork(*network);
47  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(serializedNetwork);
48  CHECK(deserializedNetwork);
49 
50  LayerVerifierBase verifier(layerName, {tensorInfo, tensorInfo}, {tensorInfo});
51  deserializedNetwork->ExecuteStrategy(verifier);
52 }
53 
54 void SerializeArgMinMaxTest(armnn::DataType dataType)
55 {
56  const std::string layerName("argminmax");
57  const armnn::TensorInfo inputInfo({1, 2, 3}, armnn::DataType::Float32);
58  const armnn::TensorInfo outputInfo({1, 3}, dataType);
59 
60  armnn::ArgMinMaxDescriptor descriptor;
61  descriptor.m_Function = armnn::ArgMinMaxFunction::Max;
62  descriptor.m_Axis = 1;
63 
64  armnn::INetworkPtr network = armnn::INetwork::Create();
65  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
66  armnn::IConnectableLayer* const argMinMaxLayer = network->AddArgMinMaxLayer(descriptor, layerName.c_str());
67  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
68 
69  inputLayer->GetOutputSlot(0).Connect(argMinMaxLayer->GetInputSlot(0));
70  argMinMaxLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
71 
72  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
73  argMinMaxLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
74 
75  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
76  CHECK(deserializedNetwork);
77 
78  LayerVerifierBaseWithDescriptor<armnn::ArgMinMaxDescriptor> verifier(layerName,
79  {inputInfo},
80  {outputInfo},
81  descriptor);
82  deserializedNetwork->ExecuteStrategy(verifier);
83 }
84 
85 TEST_CASE("SerializeArgMinMaxSigned32")
86 {
87  SerializeArgMinMaxTest(armnn::DataType::Signed32);
88 }
89 
90 TEST_CASE("SerializeArgMinMaxSigned64")
91 {
92  SerializeArgMinMaxTest(armnn::DataType::Signed64);
93 }
94 
95 TEST_CASE("SerializeBatchNormalization")
96 {
97  const std::string layerName("batchNormalization");
98  const armnn::TensorInfo inputInfo ({ 1, 3, 3, 1 }, armnn::DataType::Float32);
99  const armnn::TensorInfo outputInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32);
100 
101  const armnn::TensorInfo meanInfo({1}, armnn::DataType::Float32, 0.0f, 0, true);
102  const armnn::TensorInfo varianceInfo({1}, armnn::DataType::Float32, 0.0f, 0, true);
103  const armnn::TensorInfo betaInfo({1}, armnn::DataType::Float32, 0.0f, 0, true);
104  const armnn::TensorInfo gammaInfo({1}, armnn::DataType::Float32, 0.0f, 0, true);
105 
106  armnn::BatchNormalizationDescriptor descriptor;
107  descriptor.m_Eps = 0.0010000000475f;
108  descriptor.m_DataLayout = armnn::DataLayout::NHWC;
109 
110  std::vector<float> meanData({5.0});
111  std::vector<float> varianceData({2.0});
112  std::vector<float> betaData({1.0});
113  std::vector<float> gammaData({0.0});
114 
115  std::vector<armnn::ConstTensor> constants;
116  constants.emplace_back(armnn::ConstTensor(meanInfo, meanData));
117  constants.emplace_back(armnn::ConstTensor(varianceInfo, varianceData));
118  constants.emplace_back(armnn::ConstTensor(betaInfo, betaData));
119  constants.emplace_back(armnn::ConstTensor(gammaInfo, gammaData));
120 
121  armnn::INetworkPtr network = armnn::INetwork::Create();
122  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
123  armnn::IConnectableLayer* const batchNormalizationLayer =
124  network->AddBatchNormalizationLayer(descriptor,
125  constants[0],
126  constants[1],
127  constants[2],
128  constants[3],
129  layerName.c_str());
130  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
131 
132  inputLayer->GetOutputSlot(0).Connect(batchNormalizationLayer->GetInputSlot(0));
133  batchNormalizationLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
134 
135  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
136  batchNormalizationLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
137 
138  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
139  CHECK(deserializedNetwork);
140 
141  LayerVerifierBaseWithDescriptorAndConstants<armnn::BatchNormalizationDescriptor> verifier(
142  layerName, {inputInfo}, {outputInfo}, descriptor, constants);
143  deserializedNetwork->ExecuteStrategy(verifier);
144 }
145 
146 TEST_CASE("SerializeBatchToSpaceNd")
147 {
148  const std::string layerName("spaceToBatchNd");
149  const armnn::TensorInfo inputInfo({4, 1, 2, 2}, armnn::DataType::Float32);
150  const armnn::TensorInfo outputInfo({1, 1, 4, 4}, armnn::DataType::Float32);
151 
152  armnn::BatchToSpaceNdDescriptor desc;
153  desc.m_DataLayout = armnn::DataLayout::NCHW;
154  desc.m_BlockShape = {2, 2};
155  desc.m_Crops = {{0, 0}, {0, 0}};
156 
157  armnn::INetworkPtr network = armnn::INetwork::Create();
158  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
159  armnn::IConnectableLayer* const batchToSpaceNdLayer = network->AddBatchToSpaceNdLayer(desc, layerName.c_str());
160  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
161 
162  inputLayer->GetOutputSlot(0).Connect(batchToSpaceNdLayer->GetInputSlot(0));
163  batchToSpaceNdLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
164 
165  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
166  batchToSpaceNdLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
167 
168  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
169  CHECK(deserializedNetwork);
170 
171  LayerVerifierBaseWithDescriptor<armnn::BatchToSpaceNdDescriptor> verifier(layerName,
172  {inputInfo},
173  {outputInfo},
174  desc);
175  deserializedNetwork->ExecuteStrategy(verifier);
176 }
177 
178 TEST_CASE("SerializeCast")
179 {
180  const std::string layerName("cast");
181 
182  const armnn::TensorShape shape{1, 5, 2, 3};
183 
184  const armnn::TensorInfo inputInfo  = armnn::TensorInfo(shape, armnn::DataType::Signed32);
185  const armnn::TensorInfo outputInfo = armnn::TensorInfo(shape, armnn::DataType::Float32);
186 
187  armnn::INetworkPtr network = armnn::INetwork::Create();
188  armnn::IConnectableLayer* inputLayer = network->AddInputLayer(0);
189  armnn::IConnectableLayer* castLayer = network->AddCastLayer(layerName.c_str());
190  armnn::IConnectableLayer* outputLayer = network->AddOutputLayer(0);
191 
192  inputLayer->GetOutputSlot(0).Connect(castLayer->GetInputSlot(0));
193  castLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
194 
195  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
196  castLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
197 
198  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
199  CHECK(deserializedNetwork);
200 
201  LayerVerifierBase verifier(layerName, {inputInfo}, {outputInfo});
202  deserializedNetwork->ExecuteStrategy(verifier);
203 }
204 
205 TEST_CASE("SerializeChannelShuffle")
206 {
207  const std::string layerName("channelShuffle");
208  const armnn::TensorInfo inputInfo({1, 9}, armnn::DataType::Float32);
209  const armnn::TensorInfo outputInfo({1, 9}, armnn::DataType::Float32);
210 
211  armnn::ChannelShuffleDescriptor descriptor({3, 1});
212 
213  armnn::INetworkPtr network = armnn::INetwork::Create();
214  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
215  armnn::IConnectableLayer* const ChannelShuffleLayer =
216  network->AddChannelShuffleLayer(descriptor, layerName.c_str());
217  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
218 
219  inputLayer->GetOutputSlot(0).Connect(ChannelShuffleLayer->GetInputSlot(0));
220  ChannelShuffleLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
221 
222  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
223  ChannelShuffleLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
224 
225  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
226  CHECK(deserializedNetwork);
227 
228  LayerVerifierBaseWithDescriptor<armnn::ChannelShuffleDescriptor> verifier(
229  layerName, {inputInfo}, {outputInfo}, descriptor);
230  deserializedNetwork->ExecuteStrategy(verifier);
231 }
232 
233 TEST_CASE("SerializeComparison")
234 {
235  const std::string layerName("comparison");
236 
237  const armnn::TensorShape shape{2, 1, 2, 4};
238 
239  const armnn::TensorInfo inputInfo  = armnn::TensorInfo(shape, armnn::DataType::Float32);
240  const armnn::TensorInfo outputInfo = armnn::TensorInfo(shape, armnn::DataType::Boolean);
241 
242  armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::NotEqual);
243 
244  armnn::INetworkPtr network = armnn::INetwork::Create();
245  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
246  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
247  armnn::IConnectableLayer* const comparisonLayer = network->AddComparisonLayer(descriptor, layerName.c_str());
248  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
249 
250  inputLayer0->GetOutputSlot(0).Connect(comparisonLayer->GetInputSlot(0));
251  inputLayer1->GetOutputSlot(0).Connect(comparisonLayer->GetInputSlot(1));
252  comparisonLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
253 
254  inputLayer0->GetOutputSlot(0).SetTensorInfo(inputInfo);
255  inputLayer1->GetOutputSlot(0).SetTensorInfo(inputInfo);
256  comparisonLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
257 
258  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
259  CHECK(deserializedNetwork);
260 
261  LayerVerifierBaseWithDescriptor<armnn::ComparisonDescriptor> verifier(layerName,
262  { inputInfo, inputInfo },
263  { outputInfo },
264  descriptor);
265  deserializedNetwork->ExecuteStrategy(verifier);
266 }
267 
268 TEST_CASE("SerializeConstant")
269 {
270  class ConstantLayerVerifier : public LayerVerifierBase
271  {
272  public:
273  ConstantLayerVerifier(const std::string& layerName,
274  const std::vector<armnn::TensorInfo>& inputInfos,
275  const std::vector<armnn::TensorInfo>& outputInfos,
276  const std::vector<armnn::ConstTensor>& constants)
277  : LayerVerifierBase(layerName, inputInfos, outputInfos)
278  , m_Constants(constants) {}
279 
280  void ExecuteStrategy(const armnn::IConnectableLayer* layer,
281  const armnn::BaseDescriptor& descriptor,
282  const std::vector<armnn::ConstTensor>& constants,
283  const char* name,
284  const armnn::LayerBindingId id = 0) override
285  {
286  armnn::IgnoreUnused(descriptor, id);
287 
288  switch (layer->GetType())
289  {
290  case armnn::LayerType::Input: break;
291  case armnn::LayerType::Output: break;
292  case armnn::LayerType::Addition: break;
293  default:
294  {
295  this->VerifyNameAndConnections(layer, name);
296 
297  for (std::size_t i = 0; i < constants.size(); i++)
298  {
299  CompareConstTensor(constants[i], m_Constants[i]);
300  }
301  }
302  }
303  }
304 
305  private:
306  const std::vector<armnn::ConstTensor> m_Constants;
307  };
308 
309  const std::string layerName("constant");
310  const armnn::TensorInfo info({ 2, 3 }, armnn::DataType::Float32, 0.0f, 0, true);
311 
312  std::vector<float> constantData = GenerateRandomData<float>(info.GetNumElements());
313  armnn::ConstTensor constTensor(info, constantData);
314 
315  armnn::INetworkPtr network = armnn::INetwork::Create();
316  armnn::IConnectableLayer* input = network->AddInputLayer(0);
317  armnn::IConnectableLayer* constant = network->AddConstantLayer(constTensor, layerName.c_str());
318  armnn::IConnectableLayer* add = network->AddAdditionLayer();
319  armnn::IConnectableLayer* output = network->AddOutputLayer(0);
320 
321  input->GetOutputSlot(0).Connect(add->GetInputSlot(0));
322  constant->GetOutputSlot(0).Connect(add->GetInputSlot(1));
323  add->GetOutputSlot(0).Connect(output->GetInputSlot(0));
324 
325  input->GetOutputSlot(0).SetTensorInfo(info);
326  constant->GetOutputSlot(0).SetTensorInfo(info);
327  add->GetOutputSlot(0).SetTensorInfo(info);
328 
329  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
330  CHECK(deserializedNetwork);
331 
332  ConstantLayerVerifier verifier(layerName, {}, {info}, {constTensor});
333  deserializedNetwork->ExecuteStrategy(verifier);
334 }
335 
336 using Convolution2dDescriptor = armnn::Convolution2dDescriptor;
337 class Convolution2dLayerVerifier : public LayerVerifierBaseWithDescriptor<Convolution2dDescriptor>
338 {
339 public:
340  Convolution2dLayerVerifier(const std::string& layerName,
341  const std::vector<armnn::TensorInfo>& inputInfos,
342  const std::vector<armnn::TensorInfo>& outputInfos,
343  const Convolution2dDescriptor& descriptor)
344  : LayerVerifierBaseWithDescriptor<Convolution2dDescriptor>(layerName, inputInfos, outputInfos, descriptor) {}
345 
346  void ExecuteStrategy(const armnn::IConnectableLayer* layer,
347  const armnn::BaseDescriptor& descriptor,
348  const std::vector<armnn::ConstTensor>& constants,
349  const char* name,
350  const armnn::LayerBindingId id = 0) override
351  {
352  armnn::IgnoreUnused(constants, id);
353  switch (layer->GetType())
354  {
355  case armnn::LayerType::Input: break;
356  case armnn::LayerType::Output: break;
357  case armnn::LayerType::Constant: break;
358  default:
359  {
360  VerifyNameAndConnections(layer, name);
361  const Convolution2dDescriptor& layerDescriptor =
362  static_cast<const Convolution2dDescriptor&>(descriptor);
363  CHECK(layerDescriptor.m_BiasEnabled == m_Descriptor.m_BiasEnabled);
364  }
365  }
366  }
367 };
368 
369 TEST_CASE("SerializeConvolution2d")
370 {
371  const std::string layerName("convolution2d");
372  const armnn::TensorInfo inputInfo ({ 1, 5, 5, 1 }, armnn::DataType::Float32);
373  const armnn::TensorInfo outputInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32);
374 
375  const armnn::TensorInfo weightsInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32, 0.0f, 0, true);
376  const armnn::TensorInfo biasesInfo ({ 1 }, armnn::DataType::Float32, 0.0f, 0, true);
377 
378  std::vector<float> weightsData = GenerateRandomData<float>(weightsInfo.GetNumElements());
379  armnn::ConstTensor weights(weightsInfo, weightsData);
380 
381  std::vector<float> biasesData = GenerateRandomData<float>(biasesInfo.GetNumElements());
382  armnn::ConstTensor biases(biasesInfo, biasesData);
383 
384  armnn::Convolution2dDescriptor descriptor;
385  descriptor.m_PadLeft = 1;
386  descriptor.m_PadRight = 1;
387  descriptor.m_PadTop = 1;
388  descriptor.m_PadBottom = 1;
389  descriptor.m_StrideX = 2;
390  descriptor.m_StrideY = 2;
391  descriptor.m_DilationX = 2;
392  descriptor.m_DilationY = 2;
393  descriptor.m_BiasEnabled = true;
395 
396  armnn::INetworkPtr network = armnn::INetwork::Create();
397  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
398  ARMNN_NO_DEPRECATE_WARN_BEGIN
399  armnn::IConnectableLayer* const convLayer =
400  network->AddConvolution2dLayer(descriptor,
401  weights,
402  armnn::Optional<armnn::ConstTensor>(biases),
403  layerName.c_str());
404  ARMNN_NO_DEPRECATE_WARN_END
405  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
406 
407  inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
408  convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
409 
410  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
411  convLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
412 
413  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
414  CHECK(deserializedNetwork);
415 
416  Convolution2dLayerVerifier verifier(layerName, {inputInfo, weightsInfo, biasesInfo}, {outputInfo}, descriptor);
417  deserializedNetwork->ExecuteStrategy(verifier);
418 }
419 
420 TEST_CASE("SerializeConvolution2dWithPerAxisParams")
421 {
422  using namespace armnn;
423 
424  const std::string layerName("convolution2dWithPerAxis");
425  const TensorInfo inputInfo ({ 1, 3, 1, 2 }, DataType::QAsymmU8, 0.55f, 128);
426  const TensorInfo outputInfo({ 1, 3, 1, 3 }, DataType::QAsymmU8, 0.75f, 128);
427 
428  const std::vector<float> quantScales{ 0.75f, 0.65f, 0.85f };
429  constexpr unsigned int quantDimension = 0;
430 
431  const TensorInfo kernelInfo({ 3, 1, 1, 2 }, DataType::QSymmS8, quantScales, quantDimension, true);
432 
433  const std::vector<float> biasQuantScales{ 0.25f, 0.50f, 0.75f };
434  const TensorInfo biasInfo({ 3 }, DataType::Signed32, biasQuantScales, quantDimension, true);
435 
436  std::vector<int8_t> kernelData = GenerateRandomData<int8_t>(kernelInfo.GetNumElements());
437  armnn::ConstTensor weights(kernelInfo, kernelData);
438  std::vector<int32_t> biasData = GenerateRandomData<int32_t>(biasInfo.GetNumElements());
439  armnn::ConstTensor biases(biasInfo, biasData);
440 
441  Convolution2dDescriptor descriptor;
442  descriptor.m_StrideX = 1;
443  descriptor.m_StrideY = 1;
444  descriptor.m_PadLeft = 0;
445  descriptor.m_PadRight = 0;
446  descriptor.m_PadTop = 0;
447  descriptor.m_PadBottom = 0;
448  descriptor.m_BiasEnabled = true;
449  descriptor.m_DataLayout = armnn::DataLayout::NHWC;
450 
451  armnn::INetworkPtr network = armnn::INetwork::Create();
452  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
453  ARMNN_NO_DEPRECATE_WARN_BEGIN
454  armnn::IConnectableLayer* const convLayer =
455  network->AddConvolution2dLayer(descriptor,
456  weights,
457  armnn::Optional<armnn::ConstTensor>(biases),
458  layerName.c_str());
459  ARMNN_NO_DEPRECATE_WARN_END
460  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
461 
462  inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
463  convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
464 
465  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
466  convLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
467 
468  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
469  CHECK(deserializedNetwork);
470 
471  Convolution2dLayerVerifier verifier(layerName, {inputInfo, kernelInfo, biasInfo}, {outputInfo}, descriptor);
472 
473  deserializedNetwork->ExecuteStrategy(verifier);
474 }
475 
476 TEST_CASE("SerializeConvolution2dWeightsAndBiasesAsConstantLayers")
477 {
478  const std::string layerName("convolution2d");
479  const armnn::TensorInfo inputInfo ({ 1, 5, 5, 1 }, armnn::DataType::Float32);
480  const armnn::TensorInfo outputInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32);
481 
482  const armnn::TensorInfo weightsInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32, 0.0f, 0, true);
483  const armnn::TensorInfo biasesInfo ({ 1 }, armnn::DataType::Float32, 0.0f, 0, true);
484 
485  std::vector<float> weightsData = GenerateRandomData<float>(weightsInfo.GetNumElements());
486  armnn::ConstTensor weights(weightsInfo, weightsData);
487 
488  std::vector<float> biasesData = GenerateRandomData<float>(biasesInfo.GetNumElements());
489  armnn::ConstTensor biases(biasesInfo, biasesData);
490 
491  armnn::Convolution2dDescriptor descriptor;
492  descriptor.m_PadLeft = 1;
493  descriptor.m_PadRight = 1;
494  descriptor.m_PadTop = 1;
495  descriptor.m_PadBottom = 1;
496  descriptor.m_StrideX = 2;
497  descriptor.m_StrideY = 2;
498  descriptor.m_DilationX = 2;
499  descriptor.m_DilationY = 2;
500  descriptor.m_BiasEnabled = true;
502 
503  armnn::INetworkPtr network = armnn::INetwork::Create();
504  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
505  armnn::IConnectableLayer* const weightsLayer = network->AddConstantLayer(weights, "Weights");
506  armnn::IConnectableLayer* const biasesLayer = network->AddConstantLayer(biases, "Biases");
507  armnn::IConnectableLayer* const convLayer = network->AddConvolution2dLayer(descriptor,
508  layerName.c_str());
509  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
510 
511  inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
512  weightsLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(1));
513  biasesLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(2));
514  convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
515 
516  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
517  weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsInfo);
518  biasesLayer->GetOutputSlot(0).SetTensorInfo(biasesInfo);
519  convLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
520 
521  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
522  CHECK(deserializedNetwork);
523 
524  Convolution2dLayerVerifier verifier(layerName, {inputInfo, weightsInfo, biasesInfo}, {outputInfo}, descriptor);
525 
526  deserializedNetwork->ExecuteStrategy(verifier);
527 }
528 
529 TEST_CASE("SerializeConvolution2dWeightsAndBiasesAsConstantLayers")
530 {
531  const std::string layerName("convolution2d");
532  const armnn::TensorInfo inputInfo ({ 1, 5, 5, 1 }, armnn::DataType::Float32);
533  const armnn::TensorInfo outputInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32);
534 
535  const armnn::TensorInfo weightsInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32, 0.0f, 0, true);
536  const armnn::TensorInfo biasesInfo ({ 1 }, armnn::DataType::Float32, 0.0f, 0, true);
537 
538  std::vector<float> weightsData = GenerateRandomData<float>(weightsInfo.GetNumElements());
539  armnn::ConstTensor weights(weightsInfo, weightsData);
540 
541  std::vector<float> biasesData = GenerateRandomData<float>(biasesInfo.GetNumElements());
542  armnn::ConstTensor biases(biasesInfo, biasesData);
543 
544  armnn::Convolution2dDescriptor descriptor;
545  descriptor.m_PadLeft = 1;
546  descriptor.m_PadRight = 1;
547  descriptor.m_PadTop = 1;
548  descriptor.m_PadBottom = 1;
549  descriptor.m_StrideX = 2;
550  descriptor.m_StrideY = 2;
551  descriptor.m_DilationX = 2;
552  descriptor.m_DilationY = 2;
553  descriptor.m_BiasEnabled = true;
555 
556  armnn::INetworkPtr network = armnn::INetwork::Create();
557  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
558  armnn::IConnectableLayer* const weightsLayer = network->AddConstantLayer(weights, "Weights");
559  armnn::IConnectableLayer* const biasesLayer = network->AddConstantLayer(biases, "Biases");
560  armnn::IConnectableLayer* const convLayer = network->AddConvolution2dLayer(descriptor,
561  layerName.c_str());
562  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
563 
564  inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
565  weightsLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(1));
566  biasesLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(2));
567  convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
568 
569  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
570  weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsInfo);
571  biasesLayer->GetOutputSlot(0).SetTensorInfo(biasesInfo);
572  convLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
573 
574  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
575  CHECK(deserializedNetwork);
576 
577  const std::vector<armnn::ConstTensor>& constants {weights, biases};
578  LayerVerifierBaseWithDescriptorAndConstants<armnn::Convolution2dDescriptor> verifier(
579  layerName, {inputInfo, weightsInfo, biasesInfo}, {outputInfo}, descriptor, constants);
580 
581  deserializedNetwork->ExecuteStrategy(verifier);
582 }
583 
584 TEST_CASE("SerializeConvolution3d")
585 {
586  const std::string layerName("convolution3d");
587  const armnn::TensorInfo inputInfo ({ 1, 5, 5, 5, 1 }, armnn::DataType::Float32);
588  const armnn::TensorInfo outputInfo({ 1, 2, 2, 2, 1 }, armnn::DataType::Float32);
589 
590  const armnn::TensorInfo weightsInfo({ 3, 3, 3, 1, 1 }, armnn::DataType::Float32, 0.0f, 0, true);
591  const armnn::TensorInfo biasesInfo ({ 1 }, armnn::DataType::Float32, 0.0f, 0, true);
592 
593  std::vector<float> weightsData = GenerateRandomData<float>(weightsInfo.GetNumElements());
594  armnn::ConstTensor weights(weightsInfo, weightsData);
595 
596  std::vector<float> biasesData = GenerateRandomData<float>(biasesInfo.GetNumElements());
597  armnn::ConstTensor biases(biasesInfo, biasesData);
598 
599  armnn::Convolution3dDescriptor descriptor;
600  descriptor.m_PadLeft = 0;
601  descriptor.m_PadRight = 0;
602  descriptor.m_PadTop = 0;
603  descriptor.m_PadBottom = 0;
604  descriptor.m_PadFront = 0;
605  descriptor.m_PadBack = 0;
606  descriptor.m_DilationX = 1;
607  descriptor.m_DilationY = 1;
608  descriptor.m_DilationZ = 1;
609  descriptor.m_StrideX = 2;
610  descriptor.m_StrideY = 2;
611  descriptor.m_StrideZ = 2;
612  descriptor.m_BiasEnabled = true;
613  descriptor.m_DataLayout = armnn::DataLayout::NDHWC;
614 
615  armnn::INetworkPtr network = armnn::INetwork::Create();
616  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
617  armnn::IConnectableLayer* const weightsLayer = network->AddConstantLayer(weights, "Weights");
618  armnn::IConnectableLayer* const biasesLayer = network->AddConstantLayer(biases, "Biases");
619  armnn::IConnectableLayer* const convLayer = network->AddConvolution3dLayer(descriptor, layerName.c_str());
620  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
621 
622  inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
623  weightsLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(1));
624  biasesLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(2));
625  convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
626 
627  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
628  weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsInfo);
629  biasesLayer->GetOutputSlot(0).SetTensorInfo(biasesInfo);
630  convLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
631 
632  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
633  CHECK(deserializedNetwork);
634 
635  LayerVerifierBaseWithDescriptor<armnn::Convolution3dDescriptor> verifier(
636  layerName, {inputInfo, weightsInfo, biasesInfo}, {outputInfo}, descriptor);
637  deserializedNetwork->ExecuteStrategy(verifier);
638 }
639 
640 TEST_CASE("SerializeDepthToSpace")
641 {
642  const std::string layerName("depthToSpace");
643 
644  const armnn::TensorInfo inputInfo ({ 1, 8, 4, 12 }, armnn::DataType::Float32);
645  const armnn::TensorInfo outputInfo({ 1, 16, 8, 3 }, armnn::DataType::Float32);
646 
647  armnn::DepthToSpaceDescriptor desc;
648  desc.m_BlockSize = 2;
649  desc.m_DataLayout = armnn::DataLayout::NHWC;
650 
651  armnn::INetworkPtr network = armnn::INetwork::Create();
652  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
653  armnn::IConnectableLayer* const depthToSpaceLayer = network->AddDepthToSpaceLayer(desc, layerName.c_str());
654  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
655 
656  inputLayer->GetOutputSlot(0).Connect(depthToSpaceLayer->GetInputSlot(0));
657  depthToSpaceLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
658 
659  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
660  depthToSpaceLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
661 
662  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
663  CHECK(deserializedNetwork);
664 
665  LayerVerifierBaseWithDescriptor<armnn::DepthToSpaceDescriptor> verifier(layerName, {inputInfo}, {outputInfo}, desc);
666  deserializedNetwork->ExecuteStrategy(verifier);
667 }
668 
669 TEST_CASE("SerializeDepthwiseConvolution2d")
670 {
671  const std::string layerName("depwiseConvolution2d");
672  const armnn::TensorInfo inputInfo ({ 1, 5, 5, 3 }, armnn::DataType::Float32);
673  const armnn::TensorInfo outputInfo({ 1, 3, 3, 3 }, armnn::DataType::Float32);
674 
675  const armnn::TensorInfo weightsInfo({ 1, 3, 3, 3 }, armnn::DataType::Float32, 0.0f, 0, true);
676  const armnn::TensorInfo biasesInfo ({ 3 }, armnn::DataType::Float32, 0.0f, 0, true);
677 
678  std::vector<float> weightsData = GenerateRandomData<float>(weightsInfo.GetNumElements());
679  armnn::ConstTensor weights(weightsInfo, weightsData);
680 
681  std::vector<int32_t> biasesData = GenerateRandomData<int32_t>(biasesInfo.GetNumElements());
682  armnn::ConstTensor biases(biasesInfo, biasesData);
683 
684  armnn::DepthwiseConvolution2dDescriptor descriptor;
685  descriptor.m_PadLeft = 1;
686  descriptor.m_PadRight = 1;
687  descriptor.m_PadTop = 1;
688  descriptor.m_PadBottom = 1;
689  descriptor.m_StrideX = 2;
690  descriptor.m_StrideY = 2;
691  descriptor.m_DilationX = 2;
692  descriptor.m_DilationY = 2;
693  descriptor.m_BiasEnabled = true;
695 
696  armnn::INetworkPtr network = armnn::INetwork::Create();
697  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
698  armnn::IConnectableLayer* const depthwiseConvLayer = network->AddDepthwiseConvolution2dLayer(descriptor,
699  layerName.c_str());
700  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
701 
702  inputLayer->GetOutputSlot(0).Connect(depthwiseConvLayer->GetInputSlot(0));
703  depthwiseConvLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
704 
705  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
706  depthwiseConvLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
707 
708  armnn::IConnectableLayer* const weightsLayer = network->AddConstantLayer(weights);
709  weightsLayer->GetOutputSlot(0).Connect(depthwiseConvLayer->GetInputSlot(1u));
710  weightsLayer->GetOutputSlot(0).SetTensorInfo(weights.GetInfo());
711 
712  armnn::IConnectableLayer* const biasLayer = network->AddConstantLayer(biases);
713  biasLayer->GetOutputSlot(0).Connect(depthwiseConvLayer->GetInputSlot(2u));
714  biasLayer->GetOutputSlot(0).SetTensorInfo(biases.GetInfo());
715 
716  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
717  CHECK(deserializedNetwork);
718 
719  const std::vector<armnn::ConstTensor>& constants {weights, biases};
720  LayerVerifierBaseWithDescriptorAndConstants<armnn::DepthwiseConvolution2dDescriptor> verifier(
721  layerName, {inputInfo, weightsInfo, biasesInfo}, {outputInfo}, descriptor, constants);
722  deserializedNetwork->ExecuteStrategy(verifier);
723 }
724 
725 TEST_CASE("SerializeDepthwiseConvolution2dWithPerAxisParams")
726 {
727  using namespace armnn;
728 
729  const std::string layerName("depwiseConvolution2dWithPerAxis");
730  const TensorInfo inputInfo ({ 1, 3, 3, 2 }, DataType::QAsymmU8, 0.55f, 128);
731  const TensorInfo outputInfo({ 1, 2, 2, 4 }, DataType::QAsymmU8, 0.75f, 128);
732 
733  const std::vector<float> quantScales{ 0.75f, 0.80f, 0.90f, 0.95f };
734  const unsigned int quantDimension = 0;
735  TensorInfo kernelInfo({ 2, 2, 2, 2 }, DataType::QSymmS8, quantScales, quantDimension, true);
736 
737  const std::vector<float> biasQuantScales{ 0.25f, 0.35f, 0.45f, 0.55f };
738  constexpr unsigned int biasQuantDimension = 0;
739  TensorInfo biasInfo({ 4 }, DataType::Signed32, biasQuantScales, biasQuantDimension, true);
740 
741  std::vector<int8_t> kernelData = GenerateRandomData<int8_t>(kernelInfo.GetNumElements());
742  armnn::ConstTensor weights(kernelInfo, kernelData);
743  std::vector<int32_t> biasData = GenerateRandomData<int32_t>(biasInfo.GetNumElements());
744  armnn::ConstTensor biases(biasInfo, biasData);
745 
746  armnn::DepthwiseConvolution2dDescriptor descriptor;
747  descriptor.m_StrideX = 1;
748  descriptor.m_StrideY = 1;
749  descriptor.m_PadLeft = 0;
750  descriptor.m_PadRight = 0;
751  descriptor.m_PadTop = 0;
752  descriptor.m_PadBottom = 0;
753  descriptor.m_DilationX = 1;
754  descriptor.m_DilationY = 1;
755  descriptor.m_BiasEnabled = true;
757 
758  armnn::INetworkPtr network = armnn::INetwork::Create();
759  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
760  armnn::IConnectableLayer* const depthwiseConvLayer = network->AddDepthwiseConvolution2dLayer(descriptor,
761  layerName.c_str());
762  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
763 
764  inputLayer->GetOutputSlot(0).Connect(depthwiseConvLayer->GetInputSlot(0));
765  depthwiseConvLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
766 
767  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
768  depthwiseConvLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
769 
770  armnn::IConnectableLayer* const weightsLayer = network->AddConstantLayer(weights);
771  weightsLayer->GetOutputSlot(0).Connect(depthwiseConvLayer->GetInputSlot(1u));
772  weightsLayer->GetOutputSlot(0).SetTensorInfo(weights.GetInfo());
773 
774  armnn::IConnectableLayer* const biasLayer = network->AddConstantLayer(biases);
775  biasLayer->GetOutputSlot(0).Connect(depthwiseConvLayer->GetInputSlot(2u));
776  biasLayer->GetOutputSlot(0).SetTensorInfo(biases.GetInfo());
777 
778  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
779  CHECK(deserializedNetwork);
780 
781  const std::vector<armnn::ConstTensor>& constants {weights, biases};
782  LayerVerifierBaseWithDescriptorAndConstants<armnn::DepthwiseConvolution2dDescriptor> verifier(
783  layerName, {inputInfo, kernelInfo, biasInfo}, {outputInfo}, descriptor, constants);
784  deserializedNetwork->ExecuteStrategy(verifier);
785 }
786 
787 TEST_CASE("SerializeDepthwiseConvolution2dWeightsAndBiasesAsConstantLayers")
788 {
789  const std::string layerName("depthwiseConvolution2d");
790  const armnn::TensorInfo inputInfo ({ 1, 5, 5, 1 }, armnn::DataType::Float32);
791  const armnn::TensorInfo outputInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32);
792 
793  const armnn::TensorInfo weightsInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32, 0.0f, 0, true);
794  const armnn::TensorInfo biasesInfo ({ 1 }, armnn::DataType::Float32, 0.0f, 0, true);
795 
796  std::vector<float> weightsData = GenerateRandomData<float>(weightsInfo.GetNumElements());
797  armnn::ConstTensor weights(weightsInfo, weightsData);
798 
799  std::vector<float> biasesData = GenerateRandomData<float>(biasesInfo.GetNumElements());
800  armnn::ConstTensor biases(biasesInfo, biasesData);
801 
802  armnn::DepthwiseConvolution2dDescriptor descriptor;
803  descriptor.m_PadLeft = 1;
804  descriptor.m_PadRight = 1;
805  descriptor.m_PadTop = 1;
806  descriptor.m_PadBottom = 1;
807  descriptor.m_StrideX = 2;
808  descriptor.m_StrideY = 2;
809  descriptor.m_DilationX = 2;
810  descriptor.m_DilationY = 2;
811  descriptor.m_BiasEnabled = true;
813 
814  armnn::INetworkPtr network = armnn::INetwork::Create();
815  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
816  armnn::IConnectableLayer* const weightsLayer = network->AddConstantLayer(weights, "Weights");
817  armnn::IConnectableLayer* const biasesLayer = network->AddConstantLayer(biases, "Biases");
818  armnn::IConnectableLayer* const convLayer = network->AddDepthwiseConvolution2dLayer(descriptor,
819  layerName.c_str());
820  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
821 
822  inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
823  weightsLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(1));
824  biasesLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(2));
825  convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
826 
827  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
828  weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsInfo);
829  biasesLayer->GetOutputSlot(0).SetTensorInfo(biasesInfo);
830  convLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
831 
832  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
833  CHECK(deserializedNetwork);
834 
835  const std::vector<armnn::ConstTensor>& constants {weights, biases};
836  LayerVerifierBaseWithDescriptorAndConstants<armnn::DepthwiseConvolution2dDescriptor> verifier(
837  layerName, {inputInfo, weightsInfo, biasesInfo}, {outputInfo}, descriptor, constants);
838 
839  deserializedNetwork->ExecuteStrategy(verifier);
840 }
841 
842 TEST_CASE("SerializeDequantize")
843 {
844  const std::string layerName("dequantize");
845  const armnn::TensorInfo inputInfo({ 1, 5, 2, 3 }, armnn::DataType::QAsymmU8, 0.5f, 1);
846  const armnn::TensorInfo outputInfo({ 1, 5, 2, 3 }, armnn::DataType::Float32);
847 
848  armnn::INetworkPtr network = armnn::INetwork::Create();
849  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
850  armnn::IConnectableLayer* const dequantizeLayer = network->AddDequantizeLayer(layerName.c_str());
851  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
852 
853  inputLayer->GetOutputSlot(0).Connect(dequantizeLayer->GetInputSlot(0));
854  dequantizeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
855 
856  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
857  dequantizeLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
858 
859  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
860  CHECK(deserializedNetwork);
861 
862  LayerVerifierBase verifier(layerName, {inputInfo}, {outputInfo});
863  deserializedNetwork->ExecuteStrategy(verifier);
864 }
865 
866 TEST_CASE("SerializeDeserializeDetectionPostProcess")
867 {
868  const std::string layerName("detectionPostProcess");
869 
870  const std::vector<armnn::TensorInfo> inputInfos({
873  });
874 
875  const std::vector<armnn::TensorInfo> outputInfos({
880  });
881 
882  armnn::DetectionPostProcessDescriptor descriptor;
883  descriptor.m_UseRegularNms = true;
884  descriptor.m_MaxDetections = 3;
885  descriptor.m_MaxClassesPerDetection = 1;
886  descriptor.m_DetectionsPerClass =1;
887  descriptor.m_NmsScoreThreshold = 0.0;
888  descriptor.m_NmsIouThreshold = 0.5;
889  descriptor.m_NumClasses = 2;
890  descriptor.m_ScaleY = 10.0;
891  descriptor.m_ScaleX = 10.0;
892  descriptor.m_ScaleH = 5.0;
893  descriptor.m_ScaleW = 5.0;
894 
895  const armnn::TensorInfo anchorsInfo({ 6, 4 }, armnn::DataType::Float32, 0.0f, 0, true);
896  const std::vector<float> anchorsData({
897  0.5f, 0.5f, 1.0f, 1.0f,
898  0.5f, 0.5f, 1.0f, 1.0f,
899  0.5f, 0.5f, 1.0f, 1.0f,
900  0.5f, 10.5f, 1.0f, 1.0f,
901  0.5f, 10.5f, 1.0f, 1.0f,
902  0.5f, 100.5f, 1.0f, 1.0f
903  });
904  armnn::ConstTensor anchors(anchorsInfo, anchorsData);
905 
906  armnn::INetworkPtr network = armnn::INetwork::Create();
907  armnn::IConnectableLayer* const detectionLayer =
908  network->AddDetectionPostProcessLayer(descriptor, anchors, layerName.c_str());
909 
910  for (unsigned int i = 0; i < 2; i++)
911  {
912  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(static_cast<int>(i));
913  inputLayer->GetOutputSlot(0).Connect(detectionLayer->GetInputSlot(i));
914  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfos[i]);
915  }
916 
917  for (unsigned int i = 0; i < 4; i++)
918  {
919  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(static_cast<int>(i));
920  detectionLayer->GetOutputSlot(i).Connect(outputLayer->GetInputSlot(0));
921  detectionLayer->GetOutputSlot(i).SetTensorInfo(outputInfos[i]);
922  }
923 
924  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
925  CHECK(deserializedNetwork);
926 
927  const std::vector<armnn::ConstTensor>& constants {anchors};
928  LayerVerifierBaseWithDescriptorAndConstants<armnn::DetectionPostProcessDescriptor> verifier(
929  layerName, inputInfos, outputInfos, descriptor, constants);
930  deserializedNetwork->ExecuteStrategy(verifier);
931 }
932 
933 TEST_CASE("SerializeDivision")
934 {
935  const std::string layerName("division");
936  const armnn::TensorInfo info({ 1, 5, 2, 3 }, armnn::DataType::Float32);
937 
938  armnn::INetworkPtr network = armnn::INetwork::Create();
939  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
940  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
941  armnn::IConnectableLayer* const divisionLayer = network->AddDivisionLayer(layerName.c_str());
942  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
943 
944  inputLayer0->GetOutputSlot(0).Connect(divisionLayer->GetInputSlot(0));
945  inputLayer1->GetOutputSlot(0).Connect(divisionLayer->GetInputSlot(1));
946  divisionLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
947 
948  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
949  inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
950  divisionLayer->GetOutputSlot(0).SetTensorInfo(info);
951 
952  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
953  CHECK(deserializedNetwork);
954 
955  LayerVerifierBase verifier(layerName, {info, info}, {info});
956  deserializedNetwork->ExecuteStrategy(verifier);
957 }
958 
959 TEST_CASE("SerializeDeserializeComparisonEqual")
960 {
961  const std::string layerName("EqualLayer");
962  const armnn::TensorInfo inputTensorInfo1 = armnn::TensorInfo({2, 1, 2, 4}, armnn::DataType::Float32);
963  const armnn::TensorInfo inputTensorInfo2 = armnn::TensorInfo({2, 1, 2, 4}, armnn::DataType::Float32);
964  const armnn::TensorInfo outputTensorInfo = armnn::TensorInfo({2, 1, 2, 4}, armnn::DataType::Boolean);
965 
966  armnn::INetworkPtr network = armnn::INetwork::Create();
967  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(0);
968  armnn::IConnectableLayer* const inputLayer2 = network->AddInputLayer(1);
969  armnn::ComparisonDescriptor equalDescriptor(armnn::ComparisonOperation::Equal);
970  armnn::IConnectableLayer* const equalLayer = network->AddComparisonLayer(equalDescriptor, layerName.c_str());
971  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
972 
973  inputLayer1->GetOutputSlot(0).Connect(equalLayer->GetInputSlot(0));
974  inputLayer1->GetOutputSlot(0).SetTensorInfo(inputTensorInfo1);
975  inputLayer2->GetOutputSlot(0).Connect(equalLayer->GetInputSlot(1));
976  inputLayer2->GetOutputSlot(0).SetTensorInfo(inputTensorInfo2);
977  equalLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
978  equalLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
979 
980  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
981  CHECK(deserializedNetwork);
982 
983  LayerVerifierBase verifier(layerName, {inputTensorInfo1, inputTensorInfo2}, {outputTensorInfo});
984  deserializedNetwork->ExecuteStrategy(verifier);
985 }
986 
987 void SerializeElementwiseUnaryTest(armnn::UnaryOperation unaryOperation)
988 {
989  auto layerName = GetUnaryOperationAsCString(unaryOperation);
990 
991  const armnn::TensorShape shape{2, 1, 2, 2};
992 
993  const armnn::TensorInfo inputInfo  = armnn::TensorInfo(shape, armnn::DataType::Float32);
994  const armnn::TensorInfo outputInfo = armnn::TensorInfo(shape, armnn::DataType::Float32);
995 
996  armnn::ElementwiseUnaryDescriptor descriptor(unaryOperation);
997 
998  armnn::INetworkPtr network = armnn::INetwork::Create();
999  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1000  armnn::IConnectableLayer* const elementwiseUnaryLayer =
1001  network->AddElementwiseUnaryLayer(descriptor, layerName);
1002  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1003 
1004  inputLayer->GetOutputSlot(0).Connect(elementwiseUnaryLayer->GetInputSlot(0));
1005  elementwiseUnaryLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1006 
1007  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
1008  elementwiseUnaryLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1009 
1010  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1011 
1012  CHECK(deserializedNetwork);
1013 
1014  LayerVerifierBaseWithDescriptor<armnn::ElementwiseUnaryDescriptor>
1015  verifier(layerName, { inputInfo }, { outputInfo }, descriptor);
1016 
1017  deserializedNetwork->ExecuteStrategy(verifier);
1018 }
1019 
1020 TEST_CASE("SerializeElementwiseUnary")
1021 {
1022  using op = armnn::UnaryOperation;
1023  std::initializer_list<op> allUnaryOperations = {op::Abs, op::Exp, op::Sqrt, op::Rsqrt, op::Neg,
1024  op::LogicalNot, op::Log, op::Sin};
1025 
1026  for (auto unaryOperation : allUnaryOperations)
1027  {
1028  SerializeElementwiseUnaryTest(unaryOperation);
1029  }
1030 }
1031 
1032 TEST_CASE("SerializeFill")
1033 {
1034  const std::string layerName("fill");
1035  const armnn::TensorInfo inputInfo({4}, armnn::DataType::Signed32);
1036  const armnn::TensorInfo outputInfo({1, 3, 3, 1}, armnn::DataType::Float32);
1037 
1038  armnn::FillDescriptor descriptor(1.0f);
1039 
1040  armnn::INetworkPtr network = armnn::INetwork::Create();
1041  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1042  armnn::IConnectableLayer* const fillLayer = network->AddFillLayer(descriptor, layerName.c_str());
1043  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1044 
1045  inputLayer->GetOutputSlot(0).Connect(fillLayer->GetInputSlot(0));
1046  fillLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1047 
1048  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
1049  fillLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1050 
1051  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1052  CHECK(deserializedNetwork);
1053 
1054  LayerVerifierBaseWithDescriptor<armnn::FillDescriptor> verifier(layerName, {inputInfo}, {outputInfo}, descriptor);
1055 
1056  deserializedNetwork->ExecuteStrategy(verifier);
1057 }
1058 
1059 TEST_CASE("SerializeFloor")
1060 {
1061  const std::string layerName("floor");
1062  const armnn::TensorInfo info({4,4}, armnn::DataType::Float32);
1063 
1064  armnn::INetworkPtr network = armnn::INetwork::Create();
1065  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1066  armnn::IConnectableLayer* const floorLayer = network->AddFloorLayer(layerName.c_str());
1067  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1068 
1069  inputLayer->GetOutputSlot(0).Connect(floorLayer->GetInputSlot(0));
1070  floorLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1071 
1072  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
1073  floorLayer->GetOutputSlot(0).SetTensorInfo(info);
1074 
1075  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1076  CHECK(deserializedNetwork);
1077 
1078  LayerVerifierBase verifier(layerName, {info}, {info});
1079  deserializedNetwork->ExecuteStrategy(verifier);
1080 }
1081 
1082 using FullyConnectedDescriptor = armnn::FullyConnectedDescriptor;
1083 class FullyConnectedLayerVerifier : public LayerVerifierBaseWithDescriptor<FullyConnectedDescriptor>
1084 {
1085 public:
1086  FullyConnectedLayerVerifier(const std::string& layerName,
1087  const std::vector<armnn::TensorInfo>& inputInfos,
1088  const std::vector<armnn::TensorInfo>& outputInfos,
1089  const FullyConnectedDescriptor& descriptor)
1090  : LayerVerifierBaseWithDescriptor<FullyConnectedDescriptor>(layerName, inputInfos, outputInfos, descriptor) {}
1091 
1092  void ExecuteStrategy(const armnn::IConnectableLayer* layer,
1093  const armnn::BaseDescriptor& descriptor,
1094  const std::vector<armnn::ConstTensor>& constants,
1095  const char* name,
1096  const armnn::LayerBindingId id = 0) override
1097  {
1098  armnn::IgnoreUnused(constants, id);
1099  switch (layer->GetType())
1100  {
1101  case armnn::LayerType::Input: break;
1102  case armnn::LayerType::Output: break;
1103  case armnn::LayerType::Constant: break;
1104  default:
1105  {
1106  VerifyNameAndConnections(layer, name);
1107  const FullyConnectedDescriptor& layerDescriptor =
1108  static_cast<const FullyConnectedDescriptor&>(descriptor);
1109  CHECK(layerDescriptor.m_ConstantWeights == m_Descriptor.m_ConstantWeights);
1110  CHECK(layerDescriptor.m_BiasEnabled == m_Descriptor.m_BiasEnabled);
1111  CHECK(layerDescriptor.m_TransposeWeightMatrix == m_Descriptor.m_TransposeWeightMatrix);
1112  }
1113  }
1114  }
1115 };
1116 
1117 TEST_CASE("SerializeFullyConnected")
1118 {
1119  const std::string layerName("fullyConnected");
1120  const armnn::TensorInfo inputInfo ({ 2, 5, 1, 1 }, armnn::DataType::Float32);
1121  const armnn::TensorInfo outputInfo({ 2, 3 }, armnn::DataType::Float32);
1122 
1123  const armnn::TensorInfo weightsInfo({ 5, 3 }, armnn::DataType::Float32, 0.0f, 0, true);
1124  const armnn::TensorInfo biasesInfo ({ 3 }, armnn::DataType::Float32, 0.0f, 0, true);
1125  std::vector<float> weightsData = GenerateRandomData<float>(weightsInfo.GetNumElements());
1126  std::vector<float> biasesData = GenerateRandomData<float>(biasesInfo.GetNumElements());
1127  armnn::ConstTensor weights(weightsInfo, weightsData);
1128  armnn::ConstTensor biases(biasesInfo, biasesData);
1129 
1130  armnn::FullyConnectedDescriptor descriptor;
1131  descriptor.m_BiasEnabled = true;
1132  descriptor.m_TransposeWeightMatrix = false;
1133  descriptor.m_ConstantWeights = true;
1134 
1135  armnn::INetworkPtr network = armnn::INetwork::Create();
1136  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1137  armnn::IConnectableLayer* const weightsInputLayer = network->AddInputLayer(1);
1138  armnn::IConnectableLayer* const biasInputLayer = network->AddInputLayer(2);
1139  armnn::IConnectableLayer* const fullyConnectedLayer =
1140  network->AddFullyConnectedLayer(descriptor,
1141  layerName.c_str());
1142  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1143 
1144  inputLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(0));
1145  weightsInputLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(1));
1146  biasInputLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(2));
1147  fullyConnectedLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1148 
1149  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
1150  weightsInputLayer->GetOutputSlot(0).SetTensorInfo(weightsInfo);
1151  biasInputLayer->GetOutputSlot(0).SetTensorInfo(biasesInfo);
1152  fullyConnectedLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1153 
1154  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1155  CHECK(deserializedNetwork);
1156 
1157  FullyConnectedLayerVerifier verifier(layerName, {inputInfo, weightsInfo, biasesInfo}, {outputInfo}, descriptor);
1158  deserializedNetwork->ExecuteStrategy(verifier);
1159 }
1160 
1161 TEST_CASE("SerializeFullyConnectedWeightsAndBiasesAsInputs")
1162 {
1163  const std::string layerName("fullyConnected_weights_as_inputs");
1164  const armnn::TensorInfo inputInfo ({ 2, 5, 1, 1 }, armnn::DataType::Float32);
1165  const armnn::TensorInfo outputInfo({ 2, 3 }, armnn::DataType::Float32);
1166 
1167  const armnn::TensorInfo weightsInfo({ 5, 3 }, armnn::DataType::Float32);
1168  const armnn::TensorInfo biasesInfo ({ 3 }, armnn::DataType::Float32);
1169 
1172 
1173  armnn::FullyConnectedDescriptor descriptor;
1174  descriptor.m_BiasEnabled = true;
1175  descriptor.m_TransposeWeightMatrix = false;
1176  descriptor.m_ConstantWeights = false;
1177 
1178  armnn::INetworkPtr network = armnn::INetwork::Create();
1179  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1180  armnn::IConnectableLayer* const weightsInputLayer = network->AddInputLayer(1);
1181  armnn::IConnectableLayer* const biasInputLayer = network->AddInputLayer(2);
1182  armnn::IConnectableLayer* const fullyConnectedLayer =
1183  network->AddFullyConnectedLayer(descriptor,
1184  layerName.c_str());
1185  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1186 
1187  inputLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(0));
1188  weightsInputLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(1));
1189  biasInputLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(2));
1190  fullyConnectedLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1191 
1192  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
1193  weightsInputLayer->GetOutputSlot(0).SetTensorInfo(weightsInfo);
1194  biasInputLayer->GetOutputSlot(0).SetTensorInfo(biasesInfo);
1195  fullyConnectedLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1196 
1197  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1198  CHECK(deserializedNetwork);
1199 
1200  const std::vector<armnn::ConstTensor> constants {};
1201  LayerVerifierBaseWithDescriptorAndConstants<armnn::FullyConnectedDescriptor> verifier(
1202  layerName, {inputInfo, weightsInfo, biasesInfo}, {outputInfo}, descriptor, constants);
1203  deserializedNetwork->ExecuteStrategy(verifier);
1204 }
1205 
1206 TEST_CASE("SerializeFullyConnectedWeightsAndBiasesAsConstantLayers")
1207 {
1208  const std::string layerName("fullyConnected_weights_as_inputs");
1209  const armnn::TensorInfo inputInfo ({ 2, 5, 1, 1 }, armnn::DataType::Float32);
1210  const armnn::TensorInfo outputInfo({ 2, 3 }, armnn::DataType::Float32);
1211 
1212  const armnn::TensorInfo weightsInfo({ 5, 3 }, armnn::DataType::Float32, 0.0f, 0, true);
1213  const armnn::TensorInfo biasesInfo ({ 3 }, armnn::DataType::Float32, 0.0f, 0, true);
1214 
1215  std::vector<float> weightsData = GenerateRandomData<float>(weightsInfo.GetNumElements());
1216  std::vector<float> biasesData = GenerateRandomData<float>(biasesInfo.GetNumElements());
1217  armnn::ConstTensor weights(weightsInfo, weightsData);
1218  armnn::ConstTensor biases(biasesInfo, biasesData);
1219 
1220  armnn::FullyConnectedDescriptor descriptor;
1221  descriptor.m_BiasEnabled = true;
1222  descriptor.m_TransposeWeightMatrix = false;
1223  descriptor.m_ConstantWeights = true;
1224 
1225  armnn::INetworkPtr network = armnn::INetwork::Create();
1226  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1227  armnn::IConnectableLayer* const weightsLayer = network->AddConstantLayer(weights, "Weights");
1228  armnn::IConnectableLayer* const biasesLayer = network->AddConstantLayer(biases, "Biases");
1229  armnn::IConnectableLayer* const fullyConnectedLayer = network->AddFullyConnectedLayer(descriptor,layerName.c_str());
1230  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1231 
1232  inputLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(0));
1233  weightsLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(1));
1234  biasesLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(2));
1235  fullyConnectedLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1236 
1237  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
1238  weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsInfo);
1239  biasesLayer->GetOutputSlot(0).SetTensorInfo(biasesInfo);
1240  fullyConnectedLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1241 
1242  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1243  CHECK(deserializedNetwork);
1244 
1245  FullyConnectedLayerVerifier verifier(layerName, {inputInfo, weightsInfo, biasesInfo}, {outputInfo}, descriptor);
1246  deserializedNetwork->ExecuteStrategy(verifier);
1247 }
1248 
1249 TEST_CASE("SerializeGather")
1250 {
1251  using GatherDescriptor = armnn::GatherDescriptor;
1252  class GatherLayerVerifier : public LayerVerifierBaseWithDescriptor<GatherDescriptor>
1253  {
1254  public:
1255  GatherLayerVerifier(const std::string& layerName,
1256  const std::vector<armnn::TensorInfo>& inputInfos,
1257  const std::vector<armnn::TensorInfo>& outputInfos,
1258  const GatherDescriptor& descriptor)
1259  : LayerVerifierBaseWithDescriptor<GatherDescriptor>(layerName, inputInfos, outputInfos, descriptor) {}
1260 
1261  void ExecuteStrategy(const armnn::IConnectableLayer* layer,
1262  const armnn::BaseDescriptor& descriptor,
1263  const std::vector<armnn::ConstTensor>& constants,
1264  const char* name,
1265  const armnn::LayerBindingId id = 0) override
1266  {
1267  armnn::IgnoreUnused(constants, id);
1268  switch (layer->GetType())
1269  {
1270  case armnn::LayerType::Input: break;
1271  case armnn::LayerType::Output: break;
1272  case armnn::LayerType::Constant: break;
1273  default:
1274  {
1275  VerifyNameAndConnections(layer, name);
1276  const GatherDescriptor& layerDescriptor = static_cast<const GatherDescriptor&>(descriptor);
1277  CHECK(layerDescriptor.m_Axis == m_Descriptor.m_Axis);
1278  }
1279  }
1280  }
1281  };
1282 
1283  const std::string layerName("gather");
1284  armnn::TensorInfo paramsInfo({ 8 }, armnn::DataType::QAsymmU8);
1285  armnn::TensorInfo outputInfo({ 3 }, armnn::DataType::QAsymmU8);
1286  const armnn::TensorInfo indicesInfo({ 3 }, armnn::DataType::Signed32, 0.0f, 0, true);
1287  GatherDescriptor descriptor;
1288  descriptor.m_Axis = 1;
1289 
1290  paramsInfo.SetQuantizationScale(1.0f);
1291  paramsInfo.SetQuantizationOffset(0);
1292  outputInfo.SetQuantizationScale(1.0f);
1293  outputInfo.SetQuantizationOffset(0);
1294 
1295  const std::vector<int32_t>& indicesData = {7, 6, 5};
1296 
1297  armnn::INetworkPtr network = armnn::INetwork::Create();
1298  armnn::IConnectableLayer *const inputLayer = network->AddInputLayer(0);
1299  armnn::IConnectableLayer *const constantLayer =
1300  network->AddConstantLayer(armnn::ConstTensor(indicesInfo, indicesData));
1301  armnn::IConnectableLayer *const gatherLayer = network->AddGatherLayer(descriptor, layerName.c_str());
1302  armnn::IConnectableLayer *const outputLayer = network->AddOutputLayer(0);
1303 
1304  inputLayer->GetOutputSlot(0).Connect(gatherLayer->GetInputSlot(0));
1305  constantLayer->GetOutputSlot(0).Connect(gatherLayer->GetInputSlot(1));
1306  gatherLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1307 
1308  inputLayer->GetOutputSlot(0).SetTensorInfo(paramsInfo);
1309  constantLayer->GetOutputSlot(0).SetTensorInfo(indicesInfo);
1310  gatherLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1311 
1312  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1313  CHECK(deserializedNetwork);
1314 
1315  GatherLayerVerifier verifier(layerName, {paramsInfo, indicesInfo}, {outputInfo}, descriptor);
1316  deserializedNetwork->ExecuteStrategy(verifier);
1317 }
1318 
1319 TEST_CASE("SerializeGatherNd")
1320 {
1321  class GatherNdLayerVerifier : public LayerVerifierBase
1322  {
1323  public:
1324  GatherNdLayerVerifier(const std::string& layerName,
1325  const std::vector<armnn::TensorInfo>& inputInfos,
1326  const std::vector<armnn::TensorInfo>& outputInfos)
1327  : LayerVerifierBase(layerName, inputInfos, outputInfos) {}
1328 
1329  void ExecuteStrategy(const armnn::IConnectableLayer* layer,
1330  const armnn::BaseDescriptor& descriptor,
1331  const std::vector<armnn::ConstTensor>& constants,
1332  const char* name,
1333  const armnn::LayerBindingId id = 0) override
1334  {
1335  armnn::IgnoreUnused(constants, id);
1336  switch (layer->GetType())
1337  {
1338  case armnn::LayerType::Input:
1339  case armnn::LayerType::Output:
1340  case armnn::LayerType::Constant:
1341  break;
1342  default:
1343  {
1344  VerifyNameAndConnections(layer, name);
1345  }
1346  }
1347  }
1348  };
1349 
1350  const std::string layerName("gatherNd");
1351  armnn::TensorInfo paramsInfo({ 6, 3 }, armnn::DataType::QAsymmU8);
1352  armnn::TensorInfo outputInfo({ 3, 3 }, armnn::DataType::QAsymmU8);
1353  const armnn::TensorInfo indicesInfo({ 3, 1 }, armnn::DataType::Signed32, 0.0f, 0, true);
1354 
1355  paramsInfo.SetQuantizationScale(1.0f);
1356  paramsInfo.SetQuantizationOffset(0);
1357  outputInfo.SetQuantizationScale(1.0f);
1358  outputInfo.SetQuantizationOffset(0);
1359 
1360  const std::vector<int32_t>& indicesData = {5, 1, 0};
1361 
1362  armnn::INetworkPtr network = armnn::INetwork::Create();
1363  armnn::IConnectableLayer *const inputLayer = network->AddInputLayer(0);
1364  armnn::IConnectableLayer *const constantLayer =
1365  network->AddConstantLayer(armnn::ConstTensor(indicesInfo, indicesData));
1366  armnn::IConnectableLayer *const gatherNdLayer = network->AddGatherNdLayer(layerName.c_str());
1367  armnn::IConnectableLayer *const outputLayer = network->AddOutputLayer(0);
1368 
1369  inputLayer->GetOutputSlot(0).Connect(gatherNdLayer->GetInputSlot(0));
1370  constantLayer->GetOutputSlot(0).Connect(gatherNdLayer->GetInputSlot(1));
1371  gatherNdLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1372 
1373  inputLayer->GetOutputSlot(0).SetTensorInfo(paramsInfo);
1374  constantLayer->GetOutputSlot(0).SetTensorInfo(indicesInfo);
1375  gatherNdLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1376 
1377  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1378  CHECK(deserializedNetwork);
1379 
1380  GatherNdLayerVerifier verifier(layerName, {paramsInfo, indicesInfo}, {outputInfo});
1381  deserializedNetwork->ExecuteStrategy(verifier);
1382 }
1383 
1384 TEST_CASE("SerializeComparisonGreater")
1385 {
1386  const std::string layerName("greater");
1387 
1388  const armnn::TensorShape shape{2, 1, 2, 4};
1389 
1390  const armnn::TensorInfo inputInfo  = armnn::TensorInfo(shape, armnn::DataType::Float32);
1391  const armnn::TensorInfo outputInfo = armnn::TensorInfo(shape, armnn::DataType::Boolean);
1392 
1393  armnn::INetworkPtr network = armnn::INetwork::Create();
1394  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
1395  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
1396  armnn::ComparisonDescriptor greaterDescriptor(armnn::ComparisonOperation::Greater);
1397  armnn::IConnectableLayer* const equalLayer = network->AddComparisonLayer(greaterDescriptor, layerName.c_str());
1398  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1399 
1400  inputLayer0->GetOutputSlot(0).Connect(equalLayer->GetInputSlot(0));
1401  inputLayer1->GetOutputSlot(0).Connect(equalLayer->GetInputSlot(1));
1402  equalLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1403 
1404  inputLayer0->GetOutputSlot(0).SetTensorInfo(inputInfo);
1405  inputLayer1->GetOutputSlot(0).SetTensorInfo(inputInfo);
1406  equalLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1407 
1408  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1409  CHECK(deserializedNetwork);
1410 
1411  LayerVerifierBase verifier(layerName, { inputInfo, inputInfo }, { outputInfo });
1412  deserializedNetwork->ExecuteStrategy(verifier);
1413 }
1414 
1415 
1416 TEST_CASE("SerializeInstanceNormalization")
1417 {
1418  const std::string layerName("instanceNormalization");
1419  const armnn::TensorInfo info({ 1, 2, 1, 5 }, armnn::DataType::Float32);
1420 
1421  armnn::InstanceNormalizationDescriptor descriptor;
1422  descriptor.m_Gamma = 1.1f;
1423  descriptor.m_Beta = 0.1f;
1424  descriptor.m_Eps = 0.0001f;
1425  descriptor.m_DataLayout = armnn::DataLayout::NHWC;
1426 
1427  armnn::INetworkPtr network = armnn::INetwork::Create();
1428  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1429  armnn::IConnectableLayer* const instanceNormLayer =
1430  network->AddInstanceNormalizationLayer(descriptor, layerName.c_str());
1431  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1432 
1433  inputLayer->GetOutputSlot(0).Connect(instanceNormLayer->GetInputSlot(0));
1434  instanceNormLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1435 
1436  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
1437  instanceNormLayer->GetOutputSlot(0).SetTensorInfo(info);
1438 
1439  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1440  CHECK(deserializedNetwork);
1441 
1442  LayerVerifierBaseWithDescriptor<armnn::InstanceNormalizationDescriptor> verifier(
1443  layerName, {info}, {info}, descriptor);
1444  deserializedNetwork->ExecuteStrategy(verifier);
1445 }
1446 
1447 TEST_CASE("SerializeL2Normalization")
1448 {
1449  const std::string l2NormLayerName("l2Normalization");
1450  const armnn::TensorInfo info({1, 2, 1, 5}, armnn::DataType::Float32);
1451 
1452  armnn::L2NormalizationDescriptor desc;
1453  desc.m_DataLayout = armnn::DataLayout::NCHW;
1454  desc.m_Eps = 0.0001f;
1455 
1456  armnn::INetworkPtr network = armnn::INetwork::Create();
1457  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
1458  armnn::IConnectableLayer* const l2NormLayer = network->AddL2NormalizationLayer(desc, l2NormLayerName.c_str());
1459  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1460 
1461  inputLayer0->GetOutputSlot(0).Connect(l2NormLayer->GetInputSlot(0));
1462  l2NormLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1463 
1464  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
1465  l2NormLayer->GetOutputSlot(0).SetTensorInfo(info);
1466 
1467  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1468  CHECK(deserializedNetwork);
1469 
1470  LayerVerifierBaseWithDescriptor<armnn::L2NormalizationDescriptor> verifier(
1471  l2NormLayerName, {info}, {info}, desc);
1472  deserializedNetwork->ExecuteStrategy(verifier);
1473 }
1474 
1475 TEST_CASE("EnsureL2NormalizationBackwardCompatibility")
1476 {
1477  // The hex data below is a flat buffer containing a simple network with one input,
1478  // an L2Normalization layer and an output layer with dimensions as per the tensor infos below.
1479  //
1480  // This test verifies that we can still read back these old style
1481  // models without the normalization epsilon value.
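 // When the epsilon value is absent from the dump, the deserializer is expected to fall back to
 // the default value (1e-12f), which is what the verifier below checks against.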
1482  const std::vector<uint8_t> l2NormalizationModel =
1483  {
1484  0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00,
1485  0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
1486  0x3C, 0x01, 0x00, 0x00, 0x74, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
1487  0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0xE8, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B,
1488  0x04, 0x00, 0x00, 0x00, 0xD6, 0xFE, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x08, 0x00,
1489  0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x9E, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00,
1490  0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00,
1491  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1492  0x4C, 0xFF, 0xFF, 0xFF, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x44, 0xFF, 0xFF, 0xFF, 0x00, 0x00,
1493  0x00, 0x20, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00,
1494  0x20, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x06, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00,
1495  0x0E, 0x00, 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x14, 0x00, 0x0E, 0x00, 0x00, 0x00,
1496  0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x1F, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x20, 0x00,
1497  0x00, 0x00, 0x0F, 0x00, 0x00, 0x00, 0x6C, 0x32, 0x4E, 0x6F, 0x72, 0x6D, 0x61, 0x6C, 0x69, 0x7A, 0x61, 0x74,
1498  0x69, 0x6F, 0x6E, 0x00, 0x01, 0x00, 0x00, 0x00, 0x48, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00,
1499  0x00, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
1500  0x52, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00,
1501  0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00,
1502  0x08, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1503  0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09,
1504  0x04, 0x00, 0x00, 0x00, 0xF6, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x0A, 0x00,
1505  0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x14, 0x00, 0x00, 0x00,
1506  0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00,
1507  0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1508  0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0A, 0x00, 0x00, 0x00,
1509  0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x08, 0x00,
1510  0x07, 0x00, 0x0C, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
1511  0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
1512  0x05, 0x00, 0x00, 0x00, 0x00
1513  };
1514 
1515  armnn::INetworkPtr deserializedNetwork =
1516  DeserializeNetwork(std::string(l2NormalizationModel.begin(), l2NormalizationModel.end()));
1517  CHECK(deserializedNetwork);
1518 
1519  const std::string layerName("l2Normalization");
1520  const armnn::TensorInfo inputInfo = armnn::TensorInfo({1, 2, 1, 5}, armnn::DataType::Float32);
1521 
1523  armnn::L2NormalizationDescriptor desc;
1524  // Since this variable does not exist in the l2NormalizationModel dump, the default value will be loaded
1525  desc.m_Eps = 1e-12f;
1526 
1527  LayerVerifierBaseWithDescriptor<armnn::L2NormalizationDescriptor> verifier(
1528  layerName, {inputInfo}, {inputInfo}, desc);
1529  deserializedNetwork->ExecuteStrategy(verifier);
1530 }
1531 
1532 TEST_CASE("SerializeLogicalBinary")
1533 {
1534  const std::string layerName("logicalBinaryAnd");
1535 
1536  const armnn::TensorShape shape{2, 1, 2, 2};
1537 
1538  const armnn::TensorInfo inputInfo  = armnn::TensorInfo(shape, armnn::DataType::Boolean);
1539  const armnn::TensorInfo outputInfo = armnn::TensorInfo(shape, armnn::DataType::Boolean);
1540 
1541  armnn::LogicalBinaryDescriptor descriptor(armnn::LogicalBinaryOperation::LogicalAnd);
1542 
1543  armnn::INetworkPtr network = armnn::INetwork::Create();
1544  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
1545  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
1546  armnn::IConnectableLayer* const logicalBinaryLayer = network->AddLogicalBinaryLayer(descriptor, layerName.c_str());
1547  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1548 
1549  inputLayer0->GetOutputSlot(0).Connect(logicalBinaryLayer->GetInputSlot(0));
1550  inputLayer1->GetOutputSlot(0).Connect(logicalBinaryLayer->GetInputSlot(1));
1551  logicalBinaryLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1552 
1553  inputLayer0->GetOutputSlot(0).SetTensorInfo(inputInfo);
1554  inputLayer1->GetOutputSlot(0).SetTensorInfo(inputInfo);
1555  logicalBinaryLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1556 
1557  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1558  CHECK(deserializedNetwork);
1559 
1560  LayerVerifierBaseWithDescriptor<armnn::LogicalBinaryDescriptor> verifier(
1561  layerName, { inputInfo, inputInfo }, { outputInfo }, descriptor);
1562  deserializedNetwork->ExecuteStrategy(verifier);
1563 }
1564 
1565 TEST_CASE("SerializeLogSoftmax")
1566 {
1567  const std::string layerName("log_softmax");
1568  const armnn::TensorInfo info({1, 10}, armnn::DataType::Float32);
1569 
1570  armnn::LogSoftmaxDescriptor descriptor;
1571  descriptor.m_Beta = 1.0f;
1572  descriptor.m_Axis = -1;
1573 
1574  armnn::INetworkPtr network = armnn::INetwork::Create();
1575  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1576  armnn::IConnectableLayer* const logSoftmaxLayer = network->AddLogSoftmaxLayer(descriptor, layerName.c_str());
1577  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1578 
1579  inputLayer->GetOutputSlot(0).Connect(logSoftmaxLayer->GetInputSlot(0));
1580  logSoftmaxLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1581 
1582  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
1583  logSoftmaxLayer->GetOutputSlot(0).SetTensorInfo(info);
1584 
1585  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1586  CHECK(deserializedNetwork);
1587 
1588  LayerVerifierBaseWithDescriptor<armnn::LogSoftmaxDescriptor> verifier(layerName, {info}, {info}, descriptor);
1589  deserializedNetwork->ExecuteStrategy(verifier);
1590 }
1591 
1592 TEST_CASE("SerializeMaximum")
1593 {
1594  const std::string layerName("maximum");
1595  const armnn::TensorInfo info({ 1, 2, 2, 3 }, armnn::DataType::Float32);
1596 
1597  armnn::INetworkPtr network = armnn::INetwork::Create();
1598  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
1599  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
1600  armnn::IConnectableLayer* const maximumLayer = network->AddMaximumLayer(layerName.c_str());
1601  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1602 
1603  inputLayer0->GetOutputSlot(0).Connect(maximumLayer->GetInputSlot(0));
1604  inputLayer1->GetOutputSlot(0).Connect(maximumLayer->GetInputSlot(1));
1605  maximumLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1606 
1607  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
1608  inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
1609  maximumLayer->GetOutputSlot(0).SetTensorInfo(info);
1610 
1611  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1612  CHECK(deserializedNetwork);
1613 
1614  LayerVerifierBase verifier(layerName, {info, info}, {info});
1615  deserializedNetwork->ExecuteStrategy(verifier);
1616 }
1617 
1618 TEST_CASE("SerializeMean")
1619 {
1620  const std::string layerName("mean");
1621  const armnn::TensorInfo inputInfo({1, 1, 3, 2}, armnn::DataType::Float32);
1622  const armnn::TensorInfo outputInfo({1, 1, 1, 2}, armnn::DataType::Float32);
1623 
1624  armnn::MeanDescriptor descriptor;
1625  descriptor.m_Axis = { 2 };
1626  descriptor.m_KeepDims = true;
1627 
1628  armnn::INetworkPtr network = armnn::INetwork::Create();
1629  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1630  armnn::IConnectableLayer* const meanLayer = network->AddMeanLayer(descriptor, layerName.c_str());
1631  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1632 
1633  inputLayer->GetOutputSlot(0).Connect(meanLayer->GetInputSlot(0));
1634  meanLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1635 
1636  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
1637  meanLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1638 
1639  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1640  CHECK(deserializedNetwork);
1641 
1642  LayerVerifierBaseWithDescriptor<armnn::MeanDescriptor> verifier(layerName, {inputInfo}, {outputInfo}, descriptor);
1643  deserializedNetwork->ExecuteStrategy(verifier);
1644 }
1645 
1646 TEST_CASE("SerializeMerge")
1647 {
1648  const std::string layerName("merge");
1649  const armnn::TensorInfo info({ 1, 2, 2, 3 }, armnn::DataType::Float32);
1650 
1651  armnn::INetworkPtr network = armnn::INetwork::Create();
1652  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
1653  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
1654  armnn::IConnectableLayer* const mergeLayer = network->AddMergeLayer(layerName.c_str());
1655  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1656 
1657  inputLayer0->GetOutputSlot(0).Connect(mergeLayer->GetInputSlot(0));
1658  inputLayer1->GetOutputSlot(0).Connect(mergeLayer->GetInputSlot(1));
1659  mergeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1660 
1661  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
1662  inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
1663  mergeLayer->GetOutputSlot(0).SetTensorInfo(info);
1664 
1665  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1666  CHECK(deserializedNetwork);
1667 
1668  LayerVerifierBase verifier(layerName, {info, info}, {info});
1669  deserializedNetwork->ExecuteStrategy(verifier);
1670 }
1671 
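 // Verifier shared by the Merger backward-compatibility test and SerializeConcat below: it throws if a
 // Merger/Merge layer survives deserialization and otherwise validates the resulting Concat layer's
 // origins descriptor.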
1672 class MergerLayerVerifier : public LayerVerifierBaseWithDescriptor<armnn::OriginsDescriptor>
1673 {
1674 public:
1675  MergerLayerVerifier(const std::string& layerName,
1676  const std::vector<armnn::TensorInfo>& inputInfos,
1677  const std::vector<armnn::TensorInfo>& outputInfos,
1678  const armnn::OriginsDescriptor& descriptor)
1679  : LayerVerifierBaseWithDescriptor<armnn::OriginsDescriptor>(layerName, inputInfos, outputInfos, descriptor) {}
1680 
1681  void ExecuteStrategy(const armnn::IConnectableLayer* layer,
1682  const armnn::BaseDescriptor& descriptor,
1683  const std::vector<armnn::ConstTensor>& constants,
1684  const char* name,
1685  const armnn::LayerBindingId id = 0) override
1686  {
1687  armnn::IgnoreUnused(descriptor, constants, id);
1688  switch (layer->GetType())
1689  {
1690  case armnn::LayerType::Input: break;
1691  case armnn::LayerType::Output: break;
1692  case armnn::LayerType::Merge:
1693  {
1694  throw armnn::Exception("MergerLayer should have translated to ConcatLayer");
1695  break;
1696  }
1697  case armnn::LayerType::Concat:
1698  {
1699  VerifyNameAndConnections(layer, name);
1700  const armnn::MergerDescriptor& layerDescriptor =
1701  static_cast<const armnn::MergerDescriptor&>(descriptor);
1702  VerifyDescriptor(layerDescriptor);
1703  break;
1704  }
1705  default:
1706  {
1707  throw armnn::Exception("Unexpected layer type in Merge test model");
1708  }
1709  }
1710  }
1711 };
1712 
1713 TEST_CASE("EnsureMergerLayerBackwardCompatibility")
1714 {
1715  // The hex data below is a flat buffer containing a simple network with two inputs,
1716  // a merger layer (now deprecated) and an output layer with dimensions as per the tensor infos below.
1717  //
1718  // This test verifies that we can still read back these old style
1719  // models replacing the MergerLayers with ConcatLayers with the same parameters.
1720  const std::vector<uint8_t> mergerModel =
1721  {
1722  0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00,
1723  0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
1724  0x38, 0x02, 0x00, 0x00, 0x8C, 0x01, 0x00, 0x00, 0x70, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x02, 0x00,
1725  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
1726  0xF4, 0xFD, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B, 0x04, 0x00, 0x00, 0x00, 0x92, 0xFE, 0xFF, 0xFF, 0x04, 0x00,
1727  0x00, 0x00, 0x9A, 0xFE, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x7E, 0xFE, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00,
1728  0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00,
1729  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1730  0xF8, 0xFE, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x48, 0xFE, 0xFF, 0xFF, 0x00, 0x00,
1731  0x00, 0x1F, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00,
1732  0x68, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00,
1733  0x0C, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
1734  0x02, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x22, 0xFF, 0xFF, 0xFF, 0x04, 0x00,
1735  0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1736  0x00, 0x00, 0x00, 0x00, 0x3E, 0xFF, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00,
1737  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x36, 0xFF, 0xFF, 0xFF,
1738  0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x1E, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x1C, 0x00,
1739  0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x6D, 0x65, 0x72, 0x67, 0x65, 0x72, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
1740  0x5C, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x34, 0xFF,
1741  0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x92, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00,
1742  0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x02, 0x00,
1743  0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x08, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00,
1744  0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x00, 0x00,
1745  0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0E, 0x00,
1746  0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00,
1747  0x06, 0x00, 0x08, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0E, 0x00,
1748  0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
1749  0x0E, 0x00, 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x14, 0x00, 0x0E, 0x00, 0x00, 0x00,
1750  0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00,
1751  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
1752  0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00,
1753  0x00, 0x00, 0x66, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1754  0x04, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00,
1755  0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09,
1756  0x04, 0x00, 0x00, 0x00, 0xF6, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x0A, 0x00,
1757  0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x14, 0x00, 0x00, 0x00,
1758  0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00,
1759  0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1760  0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0A, 0x00, 0x00, 0x00,
1761  0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x08, 0x00,
1762  0x07, 0x00, 0x0C, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
1763  0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
1764  0x02, 0x00, 0x00, 0x00
1765  };
1766 
1767  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(std::string(mergerModel.begin(), mergerModel.end()));
1768  CHECK(deserializedNetwork);
1769 
1770  const armnn::TensorInfo inputInfo = armnn::TensorInfo({ 2, 3, 2, 2 }, armnn::DataType::Float32);
1771  const armnn::TensorInfo outputInfo = armnn::TensorInfo({ 4, 3, 2, 2 }, armnn::DataType::Float32);
1772 
1773  const std::vector<armnn::TensorShape> shapes({inputInfo.GetShape(), inputInfo.GetShape()});
1774 
1775  armnn::OriginsDescriptor descriptor =
1776  armnn::CreateDescriptorForConcatenation(shapes.begin(), shapes.end(), 0);
1777 
1778  MergerLayerVerifier verifier("merger", { inputInfo, inputInfo }, { outputInfo }, descriptor);
1779  deserializedNetwork->ExecuteStrategy(verifier);
1780 }
1781 
1782 TEST_CASE("SerializeConcat")
1783 {
1784  const std::string layerName("concat");
1785  const armnn::TensorInfo inputInfo = armnn::TensorInfo({2, 3, 2, 2}, armnn::DataType::Float32);
1786  const armnn::TensorInfo outputInfo = armnn::TensorInfo({4, 3, 2, 2}, armnn::DataType::Float32);
1787 
1788  const std::vector<armnn::TensorShape> shapes({inputInfo.GetShape(), inputInfo.GetShape()});
1789 
1790  armnn::OriginsDescriptor descriptor =
1791  armnn::CreateDescriptorForConcatenation(shapes.begin(), shapes.end(), 0);
1792 
1793  armnn::INetworkPtr network = armnn::INetwork::Create();
1794  armnn::IConnectableLayer* const inputLayerOne = network->AddInputLayer(0);
1795  armnn::IConnectableLayer* const inputLayerTwo = network->AddInputLayer(1);
1796  armnn::IConnectableLayer* const concatLayer = network->AddConcatLayer(descriptor, layerName.c_str());
1797  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1798 
1799  inputLayerOne->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(0));
1800  inputLayerTwo->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(1));
1801  concatLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1802 
1803  inputLayerOne->GetOutputSlot(0).SetTensorInfo(inputInfo);
1804  inputLayerTwo->GetOutputSlot(0).SetTensorInfo(inputInfo);
1805  concatLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1806 
1807  std::string concatLayerNetwork = SerializeNetwork(*network);
1808  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(concatLayerNetwork);
1809  CHECK(deserializedNetwork);
1810 
1811  // NOTE: using the MergerLayerVerifier to ensure that it is a concat layer and not a
1812  // merger layer that gets placed into the graph.
1813  MergerLayerVerifier verifier(layerName, {inputInfo, inputInfo}, {outputInfo}, descriptor);
1814  deserializedNetwork->ExecuteStrategy(verifier);
1815 }
1816 
1817 TEST_CASE("SerializeMinimum")
1818 {
1819  const std::string layerName("minimum");
1820  const armnn::TensorInfo info({ 1, 2, 2, 3 }, armnn::DataType::Float32);
1821 
1822  armnn::INetworkPtr network = armnn::INetwork::Create();
1823  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
1824  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
1825  armnn::IConnectableLayer* const minimumLayer = network->AddMinimumLayer(layerName.c_str());
1826  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1827 
1828  inputLayer0->GetOutputSlot(0).Connect(minimumLayer->GetInputSlot(0));
1829  inputLayer1->GetOutputSlot(0).Connect(minimumLayer->GetInputSlot(1));
1830  minimumLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1831 
1832  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
1833  inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
1834  minimumLayer->GetOutputSlot(0).SetTensorInfo(info);
1835 
1836  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1837  CHECK(deserializedNetwork);
1838 
1839  LayerVerifierBase verifier(layerName, {info, info}, {info});
1840  deserializedNetwork->ExecuteStrategy(verifier);
1841 }
1842 
1843 TEST_CASE("SerializeMultiplication")
1844 {
1845  const std::string layerName("multiplication");
1846  const armnn::TensorInfo info({ 1, 5, 2, 3 }, armnn::DataType::Float32);
1847 
1848  armnn::INetworkPtr network = armnn::INetwork::Create();
1849  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
1850  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
1851  armnn::IConnectableLayer* const multiplicationLayer = network->AddMultiplicationLayer(layerName.c_str());
1852  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1853 
1854  inputLayer0->GetOutputSlot(0).Connect(multiplicationLayer->GetInputSlot(0));
1855  inputLayer1->GetOutputSlot(0).Connect(multiplicationLayer->GetInputSlot(1));
1856  multiplicationLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1857 
1858  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
1859  inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
1860  multiplicationLayer->GetOutputSlot(0).SetTensorInfo(info);
1861 
1862  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1863  CHECK(deserializedNetwork);
1864 
1865  LayerVerifierBase verifier(layerName, {info, info}, {info});
1866  deserializedNetwork->ExecuteStrategy(verifier);
1867 }
1868 
1869 TEST_CASE("SerializePrelu")
1870 {
1871  const std::string layerName("prelu");
1872 
1873  armnn::TensorInfo inputTensorInfo ({ 4, 1, 2 }, armnn::DataType::Float32);
1874  armnn::TensorInfo alphaTensorInfo ({ 5, 4, 3, 1 }, armnn::DataType::Float32);
1875  armnn::TensorInfo outputTensorInfo({ 5, 4, 3, 2 }, armnn::DataType::Float32);
1876 
1877  armnn::INetworkPtr network = armnn::INetwork::Create();
1878  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1879  armnn::IConnectableLayer* const alphaLayer = network->AddInputLayer(1);
1880  armnn::IConnectableLayer* const preluLayer = network->AddPreluLayer(layerName.c_str());
1881  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1882 
1883  inputLayer->GetOutputSlot(0).Connect(preluLayer->GetInputSlot(0));
1884  alphaLayer->GetOutputSlot(0).Connect(preluLayer->GetInputSlot(1));
1885  preluLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1886 
1887  inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
1888  alphaLayer->GetOutputSlot(0).SetTensorInfo(alphaTensorInfo);
1889  preluLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1890 
1891  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1892  CHECK(deserializedNetwork);
1893 
1894  LayerVerifierBase verifier(layerName, {inputTensorInfo, alphaTensorInfo}, {outputTensorInfo});
1895  deserializedNetwork->ExecuteStrategy(verifier);
1896 }
1897 
1898 TEST_CASE("SerializeNormalization")
1899 {
1900  const std::string layerName("normalization");
1901  const armnn::TensorInfo info({2, 1, 2, 2}, armnn::DataType::Float32);
1902 
1903  armnn::NormalizationDescriptor desc;
1904  desc.m_DataLayout = armnn::DataLayout::NCHW;
1905  desc.m_NormSize = 3;
1906  desc.m_Alpha = 1;
1907  desc.m_Beta = 1;
1908  desc.m_K = 1;
1909 
1910  armnn::INetworkPtr network = armnn::INetwork::Create();
1911  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1912  armnn::IConnectableLayer* const normalizationLayer = network->AddNormalizationLayer(desc, layerName.c_str());
1913  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1914 
1915  inputLayer->GetOutputSlot(0).Connect(normalizationLayer->GetInputSlot(0));
1916  normalizationLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1917 
1918  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
1919  normalizationLayer->GetOutputSlot(0).SetTensorInfo(info);
1920 
1921  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1922  CHECK(deserializedNetwork);
1923 
1924  LayerVerifierBaseWithDescriptor<armnn::NormalizationDescriptor> verifier(layerName, {info}, {info}, desc);
1925  deserializedNetwork->ExecuteStrategy(verifier);
1926 }
1927 
1928 TEST_CASE("SerializePad")
1929 {
1930  const std::string layerName("pad");
1931  const armnn::TensorInfo inputTensorInfo = armnn::TensorInfo({1, 2, 3, 4}, armnn::DataType::Float32);
1932  const armnn::TensorInfo outputTensorInfo = armnn::TensorInfo({1, 3, 5, 7}, armnn::DataType::Float32);
1933 
1934  armnn::PadDescriptor desc({{0, 0}, {1, 0}, {1, 1}, {1, 2}});
1935 
1936  armnn::INetworkPtr network = armnn::INetwork::Create();
1937  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1938  armnn::IConnectableLayer* const padLayer = network->AddPadLayer(desc, layerName.c_str());
1939  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1940 
1941  inputLayer->GetOutputSlot(0).Connect(padLayer->GetInputSlot(0));
1942  padLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1943 
1944  inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
1945  padLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1946 
1947  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1948  CHECK(deserializedNetwork);
1949 
1950  LayerVerifierBaseWithDescriptor<armnn::PadDescriptor> verifier(layerName,
1951  {inputTensorInfo},
1952  {outputTensorInfo},
1953  desc);
1954  deserializedNetwork->ExecuteStrategy(verifier);
1955 }
1956 
1957 TEST_CASE("SerializePadReflect")
1958 {
1959  const std::string layerName("padReflect");
1960  const armnn::TensorInfo inputTensorInfo = armnn::TensorInfo({1, 2, 3, 4}, armnn::DataType::Float32);
1961  const armnn::TensorInfo outputTensorInfo = armnn::TensorInfo({1, 3, 5, 7}, armnn::DataType::Float32);
1962 
1963  armnn::PadDescriptor desc({{0, 0}, {1, 0}, {1, 1}, {1, 2}});
1964  desc.m_PaddingMode = armnn::PaddingMode::Reflect;
1965 
1966  armnn::INetworkPtr network = armnn::INetwork::Create();
1967  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1968  armnn::IConnectableLayer* const padLayer = network->AddPadLayer(desc, layerName.c_str());
1969  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1970 
1971  inputLayer->GetOutputSlot(0).Connect(padLayer->GetInputSlot(0));
1972  padLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1973 
1974  inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
1975  padLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1976 
1977  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1978  CHECK(deserializedNetwork);
1979 
1980  LayerVerifierBaseWithDescriptor<armnn::PadDescriptor> verifier(layerName,
1981  {inputTensorInfo},
1982  {outputTensorInfo},
1983  desc);
1984  deserializedNetwork->ExecuteStrategy(verifier);
1985 }
1986 
1987 TEST_CASE("EnsurePadBackwardCompatibility")
1988 {
1989  // The PadDescriptor is being extended with a float PadValue (so a value other than 0
1990  // can be used to pad the tensor).
1991  //
1992  // This test contains a binary representation of a simple input->pad->output network
1993  // prior to this change, to test that the descriptor has been updated in a backward-
1994  // compatible way with respect to deserialization of older binary dumps.
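 // (When the PadValue field is absent from the dump it is expected to take its default of 0.)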
1995  const std::vector<uint8_t> padModel =
1996  {
1997  0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00,
1998  0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
1999  0x54, 0x01, 0x00, 0x00, 0x6C, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
2000  0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0xD0, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B,
2001  0x04, 0x00, 0x00, 0x00, 0x96, 0xFF, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x9E, 0xFF, 0xFF, 0xFF, 0x04, 0x00,
2002  0x00, 0x00, 0x72, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
2003  0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00,
2004  0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x2C, 0xFF, 0xFF, 0xFF, 0x01, 0x00, 0x00, 0x00,
2005  0x00, 0x00, 0x00, 0x00, 0x24, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x16, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00,
2006  0x0E, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x4C, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00,
2007  0x00, 0x00, 0x06, 0x00, 0x08, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x08, 0x00,
2008  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
2009  0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00,
2010  0x0E, 0x00, 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x14, 0x00, 0x0E, 0x00, 0x00, 0x00,
2011  0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00,
2012  0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x70, 0x61, 0x64, 0x00, 0x01, 0x00, 0x00, 0x00, 0x48, 0x00, 0x00, 0x00,
2013  0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00,
2014  0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x52, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00,
2015  0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x05, 0x00,
2016  0x00, 0x00, 0x07, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00,
2017  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00,
2018  0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x04, 0x00, 0x00, 0x00, 0xF6, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00,
2019  0x00, 0x00, 0x06, 0x00, 0x0A, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00,
2020  0x0E, 0x00, 0x14, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x0E, 0x00, 0x00, 0x00,
2021  0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00,
2022  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00,
2023  0x08, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00,
2024  0x0A, 0x00, 0x10, 0x00, 0x08, 0x00, 0x07, 0x00, 0x0C, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
2025  0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00,
2026  0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00
2027  };
2028 
2029  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(std::string(padModel.begin(), padModel.end()));
2030  CHECK(deserializedNetwork);
2031 
2032  const armnn::TensorInfo inputInfo = armnn::TensorInfo({ 1, 2, 3, 4 }, armnn::DataType::Float32);
2033  const armnn::TensorInfo outputInfo = armnn::TensorInfo({ 1, 3, 5, 7 }, armnn::DataType::Float32);
2034 
2035  armnn::PadDescriptor descriptor({{ 0, 0 }, { 1, 0 }, { 1, 1 }, { 1, 2 }});
2036 
2037  LayerVerifierBaseWithDescriptor<armnn::PadDescriptor> verifier("pad", { inputInfo }, { outputInfo }, descriptor);
2038  deserializedNetwork->ExecuteStrategy(verifier);
2039 }
2040 
2041 TEST_CASE("SerializePermute")
2042 {
2043  const std::string layerName("permute");
2044  const armnn::TensorInfo inputTensorInfo({4, 3, 2, 1}, armnn::DataType::Float32);
2045  const armnn::TensorInfo outputTensorInfo({1, 2, 3, 4}, armnn::DataType::Float32);
2046 
2047  armnn::PermuteDescriptor descriptor(armnn::PermutationVector({3, 2, 1, 0}));
2048 
2049  armnn::INetworkPtr network = armnn::INetwork::Create();
2050  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2051  armnn::IConnectableLayer* const permuteLayer = network->AddPermuteLayer(descriptor, layerName.c_str());
2052  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2053 
2054  inputLayer->GetOutputSlot(0).Connect(permuteLayer->GetInputSlot(0));
2055  permuteLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2056 
2057  inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
2058  permuteLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2059 
2060  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2061  CHECK(deserializedNetwork);
2062 
2063  LayerVerifierBaseWithDescriptor<armnn::PermuteDescriptor> verifier(
2064  layerName, {inputTensorInfo}, {outputTensorInfo}, descriptor);
2065  deserializedNetwork->ExecuteStrategy(verifier);
2066 }
2067 
2068 TEST_CASE("SerializePooling2d")
2069 {
2070  const std::string layerName("pooling2d");
2071  const armnn::TensorInfo inputInfo({1, 2, 2, 1}, armnn::DataType::Float32);
2072  const armnn::TensorInfo outputInfo({1, 1, 1, 1}, armnn::DataType::Float32);
2073 
2074  armnn::Pooling2dDescriptor desc;
2075  desc.m_DataLayout = armnn::DataLayout::NHWC;
2076  desc.m_PadTop = 0;
2077  desc.m_PadBottom = 0;
2078  desc.m_PadLeft = 0;
2079  desc.m_PadRight = 0;
2080  desc.m_PoolType = armnn::PoolingAlgorithm::Average;
2081  desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
2082  desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
2083  desc.m_PoolHeight = 2;
2084  desc.m_PoolWidth = 2;
2085  desc.m_StrideX = 2;
2086  desc.m_StrideY = 2;
2087 
2088  armnn::INetworkPtr network = armnn::INetwork::Create();
2089  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2090  armnn::IConnectableLayer* const pooling2dLayer = network->AddPooling2dLayer(desc, layerName.c_str());
2091  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2092 
2093  inputLayer->GetOutputSlot(0).Connect(pooling2dLayer->GetInputSlot(0));
2094  pooling2dLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2095 
2096  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2097  pooling2dLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2098 
2099  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2100  CHECK(deserializedNetwork);
2101 
2102  LayerVerifierBaseWithDescriptor<armnn::Pooling2dDescriptor> verifier(
2103  layerName, {inputInfo}, {outputInfo}, desc);
2104  deserializedNetwork->ExecuteStrategy(verifier);
2105 }
2106 
2107 TEST_CASE("SerializePooling3d")
2108 {
2109  const std::string layerName("pooling3d");
2110  const armnn::TensorInfo inputInfo({1, 1, 2, 2, 2}, armnn::DataType::Float32);
2111  const armnn::TensorInfo outputInfo({1, 1, 1, 1, 1}, armnn::DataType::Float32);
2112 
2113  armnn::Pooling3dDescriptor desc;
2114  desc.m_DataLayout = armnn::DataLayout::NDHWC;
2115  desc.m_PadFront = 0;
2116  desc.m_PadBack = 0;
2117  desc.m_PadTop = 0;
2118  desc.m_PadBottom = 0;
2119  desc.m_PadLeft = 0;
2120  desc.m_PadRight = 0;
2121  desc.m_PoolType = armnn::PoolingAlgorithm::Average;
2122  desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
2123  desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
2124  desc.m_PoolHeight = 2;
2125  desc.m_PoolWidth = 2;
2126  desc.m_PoolDepth = 2;
2127  desc.m_StrideX = 2;
2128  desc.m_StrideY = 2;
2129  desc.m_StrideZ = 2;
2130 
2131  armnn::INetworkPtr network = armnn::INetwork::Create();
2132  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2133  armnn::IConnectableLayer* const pooling3dLayer = network->AddPooling3dLayer(desc, layerName.c_str());
2134  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2135 
2136  inputLayer->GetOutputSlot(0).Connect(pooling3dLayer->GetInputSlot(0));
2137  pooling3dLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2138 
2139  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2140  pooling3dLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2141 
2142  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2143  CHECK(deserializedNetwork);
2144 
2145  LayerVerifierBaseWithDescriptor<armnn::Pooling3dDescriptor> verifier(
2146  layerName, {inputInfo}, {outputInfo}, desc);
2147  deserializedNetwork->ExecuteStrategy(verifier);
2148 }
2149 
2150 TEST_CASE("SerializeQuantize")
2151 {
2152  const std::string layerName("quantize");
2153  const armnn::TensorInfo info({ 1, 2, 2, 3 }, armnn::DataType::Float32);
2154 
2155  armnn::INetworkPtr network = armnn::INetwork::Create();
2156  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2157  armnn::IConnectableLayer* const quantizeLayer = network->AddQuantizeLayer(layerName.c_str());
2158  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2159 
2160  inputLayer->GetOutputSlot(0).Connect(quantizeLayer->GetInputSlot(0));
2161  quantizeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2162 
2163  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
2164  quantizeLayer->GetOutputSlot(0).SetTensorInfo(info);
2165 
2166  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2167  CHECK(deserializedNetwork);
2168 
2169  LayerVerifierBase verifier(layerName, {info}, {info});
2170  deserializedNetwork->ExecuteStrategy(verifier);
2171 }
2172 
2173 TEST_CASE("SerializeRank")
2174 {
2175  const std::string layerName("rank");
2176  const armnn::TensorInfo inputInfo({1, 9}, armnn::DataType::Float32);
2177  const armnn::TensorInfo outputInfo({1}, armnn::DataType::Signed32);
2178 
2179  armnn::INetworkPtr network = armnn::INetwork::Create();
2180  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2181  armnn::IConnectableLayer* const rankLayer = network->AddRankLayer(layerName.c_str());
2182  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2183 
2184  inputLayer->GetOutputSlot(0).Connect(rankLayer->GetInputSlot(0));
2185  rankLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2186 
2187  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2188  rankLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2189 
2190  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2191  CHECK(deserializedNetwork);
2192 
2193  LayerVerifierBase verifier(layerName, {inputInfo}, {outputInfo});
2194  deserializedNetwork->ExecuteStrategy(verifier);
2195 }
2196 
2197 TEST_CASE("SerializeReduceSum")
2198 {
2199  const std::string layerName("Reduce_Sum");
2200  const armnn::TensorInfo inputInfo({1, 1, 3, 2}, armnn::DataType::Float32);
2201  const armnn::TensorInfo outputInfo({1, 1, 1, 2}, armnn::DataType::Float32);
2202 
2203  armnn::ReduceDescriptor descriptor;
2204  descriptor.m_vAxis = { 2 };
2205  descriptor.m_ReduceOperation = armnn::ReduceOperation::Sum;
2206 
2207  armnn::INetworkPtr network = armnn::INetwork::Create();
2208  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2209  armnn::IConnectableLayer* const reduceSumLayer = network->AddReduceLayer(descriptor, layerName.c_str());
2210  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2211 
2212  inputLayer->GetOutputSlot(0).Connect(reduceSumLayer->GetInputSlot(0));
2213  reduceSumLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2214 
2215  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2216  reduceSumLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2217 
2218  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2219  CHECK(deserializedNetwork);
2220 
2221  LayerVerifierBaseWithDescriptor<armnn::ReduceDescriptor> verifier(layerName, {inputInfo}, {outputInfo}, descriptor);
2222  deserializedNetwork->ExecuteStrategy(verifier);
2223 }
2224 
2225 TEST_CASE("SerializeReshape")
2226 {
2227  const std::string layerName("reshape");
2228  const armnn::TensorInfo inputInfo({1, 9}, armnn::DataType::Float32);
2229  const armnn::TensorInfo outputInfo({3, 3}, armnn::DataType::Float32);
2230 
2231  armnn::ReshapeDescriptor descriptor({3, 3});
2232 
2233  armnn::INetworkPtr network = armnn::INetwork::Create();
2234  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2235  armnn::IConnectableLayer* const reshapeLayer = network->AddReshapeLayer(descriptor, layerName.c_str());
2236  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2237 
2238  inputLayer->GetOutputSlot(0).Connect(reshapeLayer->GetInputSlot(0));
2239  reshapeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2240 
2241  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2242  reshapeLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2243 
2244  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2245  CHECK(deserializedNetwork);
2246 
2247  LayerVerifierBaseWithDescriptor<armnn::ReshapeDescriptor> verifier(
2248  layerName, {inputInfo}, {outputInfo}, descriptor);
2249  deserializedNetwork->ExecuteStrategy(verifier);
2250 }
2251 
2252 TEST_CASE("SerializeResize")
2253 {
2254  const std::string layerName("resize");
2255  const armnn::TensorInfo inputInfo = armnn::TensorInfo({1, 3, 5, 5}, armnn::DataType::Float32);
2256  const armnn::TensorInfo outputInfo = armnn::TensorInfo({1, 3, 2, 4}, armnn::DataType::Float32);
2257 
2258  armnn::ResizeDescriptor desc;
2259  desc.m_TargetWidth = 4;
2260  desc.m_TargetHeight = 2;
2261  desc.m_Method = armnn::ResizeMethod::NearestNeighbor;
2262  desc.m_AlignCorners = true;
2263  desc.m_HalfPixelCenters = true;
2264 
2265  armnn::INetworkPtr network = armnn::INetwork::Create();
2266  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2267  armnn::IConnectableLayer* const resizeLayer = network->AddResizeLayer(desc, layerName.c_str());
2268  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2269 
2270  inputLayer->GetOutputSlot(0).Connect(resizeLayer->GetInputSlot(0));
2271  resizeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2272 
2273  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2274  resizeLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2275 
2276  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2277  CHECK(deserializedNetwork);
2278 
2279  LayerVerifierBaseWithDescriptor<armnn::ResizeDescriptor> verifier(layerName, {inputInfo}, {outputInfo}, desc);
2280  deserializedNetwork->ExecuteStrategy(verifier);
2281 }
2282 
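 // Verifier used by the ResizeBilinear tests below: the deprecated ResizeBilinear layer is expected
 // to be deserialized as a Resize layer configured with the Bilinear method.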
2283 class ResizeBilinearLayerVerifier : public LayerVerifierBaseWithDescriptor<armnn::ResizeDescriptor>
2284 {
2285 public:
2286  ResizeBilinearLayerVerifier(const std::string& layerName,
2287  const std::vector<armnn::TensorInfo>& inputInfos,
2288  const std::vector<armnn::TensorInfo>& outputInfos,
2289  const armnn::ResizeDescriptor& descriptor)
2290  : LayerVerifierBaseWithDescriptor<armnn::ResizeDescriptor>(
2291  layerName, inputInfos, outputInfos, descriptor) {}
2292 
2293  void ExecuteStrategy(const armnn::IConnectableLayer* layer,
2294  const armnn::BaseDescriptor& descriptor,
2295  const std::vector<armnn::ConstTensor>& constants,
2296  const char* name,
2297  const armnn::LayerBindingId id = 0) override
2298  {
2299  armnn::IgnoreUnused(descriptor, constants, id);
2300  switch (layer->GetType())
2301  {
2302  case armnn::LayerType::Input: break;
2303  case armnn::LayerType::Output: break;
2304  case armnn::LayerType::Resize:
2305  {
2306  VerifyNameAndConnections(layer, name);
2307  const armnn::ResizeDescriptor& layerDescriptor =
2308  static_cast<const armnn::ResizeDescriptor&>(descriptor);
2309  CHECK(layerDescriptor.m_Method == armnn::ResizeMethod::Bilinear);
2310  CHECK(layerDescriptor.m_TargetWidth == m_Descriptor.m_TargetWidth);
2311  CHECK(layerDescriptor.m_TargetHeight == m_Descriptor.m_TargetHeight);
2312  CHECK(layerDescriptor.m_DataLayout == m_Descriptor.m_DataLayout);
2313  CHECK(layerDescriptor.m_AlignCorners == m_Descriptor.m_AlignCorners);
2314  CHECK(layerDescriptor.m_HalfPixelCenters == m_Descriptor.m_HalfPixelCenters);
2315  break;
2316  }
2317  default:
2318  {
2319  throw armnn::Exception("Unexpected layer type in test model. ResizeBilinear "
2320  "should have translated to Resize");
2321  }
2322  }
2323  }
2324 };
2325 
2326 TEST_CASE("SerializeResizeBilinear")
2327 {
2328  const std::string layerName("resizeBilinear");
2329  const armnn::TensorInfo inputInfo = armnn::TensorInfo({1, 3, 5, 5}, armnn::DataType::Float32);
2330  const armnn::TensorInfo outputInfo = armnn::TensorInfo({1, 3, 2, 4}, armnn::DataType::Float32);
2331 
2332  armnn::ResizeDescriptor desc;
2333  desc.m_Method = armnn::ResizeMethod::Bilinear;
2334  desc.m_TargetWidth = 4u;
2335  desc.m_TargetHeight = 2u;
2336  desc.m_AlignCorners = true;
2337  desc.m_HalfPixelCenters = true;
2338 
2339  armnn::INetworkPtr network = armnn::INetwork::Create();
2340  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2341  armnn::IConnectableLayer* const resizeLayer = network->AddResizeLayer(desc, layerName.c_str());
2342  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2343 
2344  inputLayer->GetOutputSlot(0).Connect(resizeLayer->GetInputSlot(0));
2345  resizeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2346 
2347  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2348  resizeLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2349 
2350  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2351  CHECK(deserializedNetwork);
2352 
2353  ResizeBilinearLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, desc);
2354  deserializedNetwork->ExecuteStrategy(verifier);
2355 }
2356 
2357 TEST_CASE("EnsureResizeBilinearBackwardCompatibility")
2358 {
2359  // The hex data below is a flat buffer containing a simple network with an input,
2360  // a ResizeBilinearLayer (now deprecated and removed) and an output
2361  //
2362  // This test verifies that we can still deserialize this old-style model by replacing
2363  // the ResizeBilinearLayer with an equivalent ResizeLayer
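 // The ResizeBilinearLayerVerifier defined above enforces that translation.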
2364  const std::vector<uint8_t> resizeBilinearModel =
2365  {
2366  0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00,
2367  0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
2368  0x50, 0x01, 0x00, 0x00, 0x74, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
2369  0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0xD4, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B,
2370  0x04, 0x00, 0x00, 0x00, 0xC2, 0xFE, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x08, 0x00,
2371  0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x8A, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00,
2372  0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00,
2373  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
2374  0x38, 0xFF, 0xFF, 0xFF, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x30, 0xFF, 0xFF, 0xFF, 0x00, 0x00,
2375  0x00, 0x1A, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0E, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00,
2376  0x34, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x12, 0x00, 0x08, 0x00, 0x0C, 0x00,
2377  0x07, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
2378  0x00, 0x00, 0x0E, 0x00, 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x14, 0x00, 0x0E, 0x00,
2379  0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x19, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00,
2380  0x20, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x72, 0x65, 0x73, 0x69, 0x7A, 0x65, 0x42, 0x69, 0x6C, 0x69,
2381  0x6E, 0x65, 0x61, 0x72, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x48, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
2382  0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00,
2383  0x00, 0x00, 0x52, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
2384  0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x04, 0x00,
2385  0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
2386  0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
2387  0x00, 0x09, 0x04, 0x00, 0x00, 0x00, 0xF6, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00,
2388  0x0A, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x14, 0x00,
2389  0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,
2390  0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
2391  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0A, 0x00,
2392  0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00,
2393  0x08, 0x00, 0x07, 0x00, 0x0C, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00,
2394  0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x05, 0x00,
2395  0x00, 0x00, 0x05, 0x00, 0x00, 0x00
2396  };
2397 
2398  armnn::INetworkPtr deserializedNetwork =
2399  DeserializeNetwork(std::string(resizeBilinearModel.begin(), resizeBilinearModel.end()));
2400  CHECK(deserializedNetwork);
2401 
2402  const armnn::TensorInfo inputInfo = armnn::TensorInfo({1, 3, 5, 5}, armnn::DataType::Float32);
2403  const armnn::TensorInfo outputInfo = armnn::TensorInfo({1, 3, 2, 4}, armnn::DataType::Float32);
2404 
2405  armnn::ResizeDescriptor descriptor;
2406  descriptor.m_TargetWidth = 4u;
2407  descriptor.m_TargetHeight = 2u;
2408 
2409  ResizeBilinearLayerVerifier verifier("resizeBilinear", { inputInfo }, { outputInfo }, descriptor);
2410  deserializedNetwork->ExecuteStrategy(verifier);
2411 }
2412 
2413 TEST_CASE("SerializeShape")
2414 {
2415  const std::string layerName("shape");
2416  const armnn::TensorInfo inputInfo({1, 3, 3, 1}, armnn::DataType::Signed32);
2417  const armnn::TensorInfo outputInfo({ 4 }, armnn::DataType::Signed32);
2418 
2419  armnn::INetworkPtr network = armnn::INetwork::Create();
2420  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2421  armnn::IConnectableLayer* const shapeLayer = network->AddShapeLayer(layerName.c_str());
2422  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2423 
2424  inputLayer->GetOutputSlot(0).Connect(shapeLayer->GetInputSlot(0));
2425  shapeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2426 
2427  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2428  shapeLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2429 
2430  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2431  CHECK(deserializedNetwork);
2432 
2433  LayerVerifierBase verifier(layerName, {inputInfo}, {outputInfo});
2434 
2435  deserializedNetwork->ExecuteStrategy(verifier);
2436 }
2437 
2438 TEST_CASE("SerializeSlice")
2439 {
2440  const std::string layerName{"slice"};
2441 
2442  const armnn::TensorInfo inputInfo = armnn::TensorInfo({3, 2, 3, 1}, armnn::DataType::Float32);
2443  const armnn::TensorInfo outputInfo = armnn::TensorInfo({2, 2, 2, 1}, armnn::DataType::Float32);
2444 
2445  armnn::SliceDescriptor descriptor({ 0, 0, 1, 0}, {2, 2, 2, 1});
2446 
2447  armnn::INetworkPtr network = armnn::INetwork::Create();
2448 
2449  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2450  armnn::IConnectableLayer* const sliceLayer = network->AddSliceLayer(descriptor, layerName.c_str());
2451  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2452 
2453  inputLayer->GetOutputSlot(0).Connect(sliceLayer->GetInputSlot(0));
2454  sliceLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2455 
2456  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2457  sliceLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2458 
2459  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2460  CHECK(deserializedNetwork);
2461 
2462  LayerVerifierBaseWithDescriptor<armnn::SliceDescriptor> verifier(layerName, {inputInfo}, {outputInfo}, descriptor);
2463  deserializedNetwork->ExecuteStrategy(verifier);
2464 }
2465 
2466 TEST_CASE("SerializeSoftmax")
2467 {
2468  const std::string layerName("softmax");
2469  const armnn::TensorInfo info({1, 10}, armnn::DataType::Float32);
2470 
2471  armnn::SoftmaxDescriptor descriptor;
2472  descriptor.m_Beta = 1.0f;
2473 
2474  armnn::INetworkPtr network = armnn::INetwork::Create();
2475  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2476  armnn::IConnectableLayer* const softmaxLayer = network->AddSoftmaxLayer(descriptor, layerName.c_str());
2477  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2478 
2479  inputLayer->GetOutputSlot(0).Connect(softmaxLayer->GetInputSlot(0));
2480  softmaxLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2481 
2482  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
2483  softmaxLayer->GetOutputSlot(0).SetTensorInfo(info);
2484 
2485  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2486  CHECK(deserializedNetwork);
2487 
2488  LayerVerifierBaseWithDescriptor<armnn::SoftmaxDescriptor> verifier(layerName, {info}, {info}, descriptor);
2489  deserializedNetwork->ExecuteStrategy(verifier);
2490 }
2491 
2492 TEST_CASE("SerializeSpaceToBatchNd")
2493 {
2494  const std::string layerName("spaceToBatchNd");
2495  const armnn::TensorInfo inputInfo({2, 1, 2, 4}, armnn::DataType::Float32);
2496  const armnn::TensorInfo outputInfo({8, 1, 1, 3}, armnn::DataType::Float32);
2497 
2498  armnn::SpaceToBatchNdDescriptor desc;
2499  desc.m_DataLayout = armnn::DataLayout::NCHW;
2500  desc.m_BlockShape = {2, 2};
2501  desc.m_PadList = {{0, 0}, {2, 0}};
2502 
2503  armnn::INetworkPtr network = armnn::INetwork::Create();
2504  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2505  armnn::IConnectableLayer* const spaceToBatchNdLayer = network->AddSpaceToBatchNdLayer(desc, layerName.c_str());
2506  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2507 
2508  inputLayer->GetOutputSlot(0).Connect(spaceToBatchNdLayer->GetInputSlot(0));
2509  spaceToBatchNdLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2510 
2511  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2512  spaceToBatchNdLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2513 
2514  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2515  CHECK(deserializedNetwork);
2516 
2517  LayerVerifierBaseWithDescriptor<armnn::SpaceToBatchNdDescriptor> verifier(
2518  layerName, {inputInfo}, {outputInfo}, desc);
2519  deserializedNetwork->ExecuteStrategy(verifier);
2520 }
2521 
2522 TEST_CASE("SerializeSpaceToDepth")
2523 {
2524  const std::string layerName("spaceToDepth");
2525 
2526  const armnn::TensorInfo inputInfo ({ 1, 16, 8, 3 }, armnn::DataType::Float32);
2527  const armnn::TensorInfo outputInfo({ 1, 8, 4, 12 }, armnn::DataType::Float32);
2528 
2529  armnn::SpaceToDepthDescriptor desc;
2530  desc.m_BlockSize = 2;
2531  desc.m_DataLayout = armnn::DataLayout::NHWC;
2532 
2533  armnn::INetworkPtr network = armnn::INetwork::Create();
2534  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2535  armnn::IConnectableLayer* const spaceToDepthLayer = network->AddSpaceToDepthLayer(desc, layerName.c_str());
2536  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2537 
2538  inputLayer->GetOutputSlot(0).Connect(spaceToDepthLayer->GetInputSlot(0));
2539  spaceToDepthLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2540 
2541  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2542  spaceToDepthLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2543 
2544  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2545  CHECK(deserializedNetwork);
2546 
2547  LayerVerifierBaseWithDescriptor<armnn::SpaceToDepthDescriptor> verifier(
2548  layerName, {inputInfo}, {outputInfo}, desc);
2549  deserializedNetwork->ExecuteStrategy(verifier);
2550 }
2551 
2552 TEST_CASE("SerializeSplitter")
2553 {
2554  const unsigned int numViews = 3;
2555  const unsigned int numDimensions = 4;
2556  const unsigned int inputShape[] = {1, 18, 4, 4};
2557  const unsigned int outputShape[] = {1, 6, 4, 4};
2558 
2559  // This is modelled on how the Caffe parser sets up a splitter layer to partition an input along dimension one.
2560  unsigned int splitterDimSizes[4] = {static_cast<unsigned int>(inputShape[0]),
2561  static_cast<unsigned int>(inputShape[1]),
2562  static_cast<unsigned int>(inputShape[2]),
2563  static_cast<unsigned int>(inputShape[3])};
2564  splitterDimSizes[1] /= numViews;
2565  armnn::ViewsDescriptor desc(numViews, numDimensions);
2566 
2567  for (unsigned int g = 0; g < numViews; ++g)
2568  {
2569  desc.SetViewOriginCoord(g, 1, splitterDimSizes[1] * g);
2570 
2571  for (unsigned int dimIdx=0; dimIdx < 4; dimIdx++)
2572  {
2573  desc.SetViewSize(g, dimIdx, splitterDimSizes[dimIdx]);
2574  }
2575  }
2576 
2577  const std::string layerName("splitter");
2578  const armnn::TensorInfo inputInfo(numDimensions, inputShape, armnn::DataType::Float32);
2579  const armnn::TensorInfo outputInfo(numDimensions, outputShape, armnn::DataType::Float32);
2580 
2581  armnn::INetworkPtr network = armnn::INetwork::Create();
2582  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2583  armnn::IConnectableLayer* const splitterLayer = network->AddSplitterLayer(desc, layerName.c_str());
2584  armnn::IConnectableLayer* const outputLayer0 = network->AddOutputLayer(0);
2585  armnn::IConnectableLayer* const outputLayer1 = network->AddOutputLayer(1);
2586  armnn::IConnectableLayer* const outputLayer2 = network->AddOutputLayer(2);
2587 
2588  inputLayer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0));
2589  splitterLayer->GetOutputSlot(0).Connect(outputLayer0->GetInputSlot(0));
2590  splitterLayer->GetOutputSlot(1).Connect(outputLayer1->GetInputSlot(0));
2591  splitterLayer->GetOutputSlot(2).Connect(outputLayer2->GetInputSlot(0));
2592 
2593  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2594  splitterLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2595  splitterLayer->GetOutputSlot(1).SetTensorInfo(outputInfo);
2596  splitterLayer->GetOutputSlot(2).SetTensorInfo(outputInfo);
2597 
2598  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2599  CHECK(deserializedNetwork);
2600 
2601  LayerVerifierBaseWithDescriptor<armnn::ViewsDescriptor> verifier(
2602  layerName, {inputInfo}, {outputInfo, outputInfo, outputInfo}, desc);
2603  deserializedNetwork->ExecuteStrategy(verifier);
2604 }
2605 
2606 TEST_CASE("SerializeStack")
2607 {
2608  const std::string layerName("stack");
2609 
2610  armnn::TensorInfo inputTensorInfo ({4, 3, 5}, armnn::DataType::Float32);
2611  armnn::TensorInfo outputTensorInfo({4, 3, 2, 5}, armnn::DataType::Float32);
2612 
2613  armnn::StackDescriptor descriptor(2, 2, {4, 3, 5});
2614 
2615  armnn::INetworkPtr network = armnn::INetwork::Create();
2616  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(0);
2617  armnn::IConnectableLayer* const inputLayer2 = network->AddInputLayer(1);
2618  armnn::IConnectableLayer* const stackLayer = network->AddStackLayer(descriptor, layerName.c_str());
2619  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2620 
2621  inputLayer1->GetOutputSlot(0).Connect(stackLayer->GetInputSlot(0));
2622  inputLayer2->GetOutputSlot(0).Connect(stackLayer->GetInputSlot(1));
2623  stackLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2624 
2625  inputLayer1->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
2626  inputLayer2->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
2627  stackLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2628 
2629  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2630  CHECK(deserializedNetwork);
2631 
2632  LayerVerifierBaseWithDescriptor<armnn::StackDescriptor> verifier(
2633  layerName, {inputTensorInfo, inputTensorInfo}, {outputTensorInfo}, descriptor);
2634  deserializedNetwork->ExecuteStrategy(verifier);
2635 }
2636 
2637 TEST_CASE("SerializeStandIn")
2638 {
2639  const std::string layerName("standIn");
2640 
2641  armnn::TensorInfo tensorInfo({ 1u }, armnn::DataType::Float32);
2642  armnn::StandInDescriptor descriptor(2u, 2u);
2643 
2644  armnn::INetworkPtr network = armnn::INetwork::Create();
2645  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
2646  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
2647  armnn::IConnectableLayer* const standInLayer = network->AddStandInLayer(descriptor, layerName.c_str());
2648  armnn::IConnectableLayer* const outputLayer0 = network->AddOutputLayer(0);
2649  armnn::IConnectableLayer* const outputLayer1 = network->AddOutputLayer(1);
2650 
2651  inputLayer0->GetOutputSlot(0).Connect(standInLayer->GetInputSlot(0));
2652  inputLayer0->GetOutputSlot(0).SetTensorInfo(tensorInfo);
2653 
2654  inputLayer1->GetOutputSlot(0).Connect(standInLayer->GetInputSlot(1));
2655  inputLayer1->GetOutputSlot(0).SetTensorInfo(tensorInfo);
2656 
2657  standInLayer->GetOutputSlot(0).Connect(outputLayer0->GetInputSlot(0));
2658  standInLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
2659 
2660  standInLayer->GetOutputSlot(1).Connect(outputLayer1->GetInputSlot(0));
2661  standInLayer->GetOutputSlot(1).SetTensorInfo(tensorInfo);
2662 
2663  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2664  CHECK(deserializedNetwork);
2665 
2666  LayerVerifierBaseWithDescriptor<armnn::StandInDescriptor> verifier(
2667  layerName, { tensorInfo, tensorInfo }, { tensorInfo, tensorInfo }, descriptor);
2668  deserializedNetwork->ExecuteStrategy(verifier);
2669 }
2670 
2671 TEST_CASE("SerializeStridedSlice")
2672 {
2673  const std::string layerName("stridedSlice");
2674  const armnn::TensorInfo inputInfo = armnn::TensorInfo({3, 2, 3, 1}, armnn::DataType::Float32);
2675  const armnn::TensorInfo outputInfo = armnn::TensorInfo({3, 1}, armnn::DataType::Float32);
2676 
2677  armnn::StridedSliceDescriptor desc({0, 0, 1, 0}, {1, 1, 1, 1}, {1, 1, 1, 1});
2678  desc.m_EndMask = (1 << 4) - 1;
2679  desc.m_ShrinkAxisMask = (1 << 1) | (1 << 2);
2680  desc.m_DataLayout = armnn::DataLayout::NCHW;
2681 
2682  armnn::INetworkPtr network = armnn::INetwork::Create();
2683  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2684  armnn::IConnectableLayer* const stridedSliceLayer = network->AddStridedSliceLayer(desc, layerName.c_str());
2685  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2686 
2687  inputLayer->GetOutputSlot(0).Connect(stridedSliceLayer->GetInputSlot(0));
2688  stridedSliceLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2689 
2690  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2691  stridedSliceLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2692 
2693  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2694  CHECK(deserializedNetwork);
2695 
2696  LayerVerifierBaseWithDescriptor<armnn::StridedSliceDescriptor> verifier(
2697  layerName, {inputInfo}, {outputInfo}, desc);
2698  deserializedNetwork->ExecuteStrategy(verifier);
2699 }
2700 
2701 TEST_CASE("SerializeSubtraction")
2702 {
2703  const std::string layerName("subtraction");
2704  const armnn::TensorInfo info({ 1, 4 }, armnn::DataType::Float32);
2705 
2706  armnn::INetworkPtr network = armnn::INetwork::Create();
2707  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
2708  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
2709  armnn::IConnectableLayer* const subtractionLayer = network->AddSubtractionLayer(layerName.c_str());
2710  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2711 
2712  inputLayer0->GetOutputSlot(0).Connect(subtractionLayer->GetInputSlot(0));
2713  inputLayer1->GetOutputSlot(0).Connect(subtractionLayer->GetInputSlot(1));
2714  subtractionLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2715 
2716  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
2717  inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
2718  subtractionLayer->GetOutputSlot(0).SetTensorInfo(info);
2719 
2720  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2721  CHECK(deserializedNetwork);
2722 
2723  LayerVerifierBase verifier(layerName, {info, info}, {info});
2724  deserializedNetwork->ExecuteStrategy(verifier);
2725 }
2726 
2727 TEST_CASE("SerializeSwitch")
2728 {
2729  class SwitchLayerVerifier : public LayerVerifierBase
2730  {
2731  public:
2732  SwitchLayerVerifier(const std::string& layerName,
2733  const std::vector<armnn::TensorInfo>& inputInfos,
2734  const std::vector<armnn::TensorInfo>& outputInfos)
2735  : LayerVerifierBase(layerName, inputInfos, outputInfos) {}
2736 
2737  void ExecuteStrategy(const armnn::IConnectableLayer* layer,
2738  const armnn::BaseDescriptor& descriptor,
2739  const std::vector<armnn::ConstTensor>& constants,
2740  const char* name,
2741  const armnn::LayerBindingId id = 0) override
2742  {
2743  armnn::IgnoreUnused(descriptor, constants, id);
2744  switch (layer->GetType())
2745  {
2746  case armnn::LayerType::Input: break;
2747  case armnn::LayerType::Output: break;
2748  case armnn::LayerType::Constant: break;
2749  case armnn::LayerType::Switch:
2750  {
2751  VerifyNameAndConnections(layer, name);
2752  break;
2753  }
2754  default:
2755  {
2756  throw armnn::Exception("Unexpected layer type in Switch test model");
2757  }
2758  }
2759  }
2760  };
2761 
2762  const std::string layerName("switch");
2763  const armnn::TensorInfo info({ 1, 4 }, armnn::DataType::Float32, 0.0f, 0, true);
2764 
2765  std::vector<float> constantData = GenerateRandomData<float>(info.GetNumElements());
2766  armnn::ConstTensor constTensor(info, constantData);
2767 
2768  armnn::INetworkPtr network = armnn::INetwork::Create();
2769  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2770  armnn::IConnectableLayer* const constantLayer = network->AddConstantLayer(constTensor, "constant");
2771  armnn::IConnectableLayer* const switchLayer = network->AddSwitchLayer(layerName.c_str());
2772  armnn::IConnectableLayer* const trueOutputLayer = network->AddOutputLayer(0);
2773  armnn::IConnectableLayer* const falseOutputLayer = network->AddOutputLayer(1);
2774 
2775  inputLayer->GetOutputSlot(0).Connect(switchLayer->GetInputSlot(0));
2776  constantLayer->GetOutputSlot(0).Connect(switchLayer->GetInputSlot(1));
2777  switchLayer->GetOutputSlot(0).Connect(trueOutputLayer->GetInputSlot(0));
2778  switchLayer->GetOutputSlot(1).Connect(falseOutputLayer->GetInputSlot(0));
2779 
2780  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
2781  constantLayer->GetOutputSlot(0).SetTensorInfo(info);
2782  switchLayer->GetOutputSlot(0).SetTensorInfo(info);
2783  switchLayer->GetOutputSlot(1).SetTensorInfo(info);
2784 
2785  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2786  CHECK(deserializedNetwork);
2787 
2788  SwitchLayerVerifier verifier(layerName, {info, info}, {info, info});
2789  deserializedNetwork->ExecuteStrategy(verifier);
2790 }
2791 
2792 TEST_CASE("SerializeTranspose")
2793 {
2794  const std::string layerName("transpose");
2795  const armnn::TensorInfo inputTensorInfo({4, 3, 2, 1}, armnn::DataType::Float32);
2796  const armnn::TensorInfo outputTensorInfo({1, 2, 3, 4}, armnn::DataType::Float32);
2797 
2798  armnn::TransposeDescriptor descriptor(armnn::PermutationVector({3, 2, 1, 0}));
2799 
2800  armnn::INetworkPtr network = armnn::INetwork::Create();
2801  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2802  armnn::IConnectableLayer* const transposeLayer = network->AddTransposeLayer(descriptor, layerName.c_str());
2803  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2804 
2805  inputLayer->GetOutputSlot(0).Connect(transposeLayer->GetInputSlot(0));
2806  transposeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2807 
2808  inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
2809  transposeLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2810 
2811  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2812  CHECK(deserializedNetwork);
2813 
2814  LayerVerifierBaseWithDescriptor<armnn::TransposeDescriptor> verifier(
2815  layerName, {inputTensorInfo}, {outputTensorInfo}, descriptor);
2816  deserializedNetwork->ExecuteStrategy(verifier);
2817 }
2818 
2819 TEST_CASE("SerializeTransposeConvolution2d")
2820 {
2821  const std::string layerName("transposeConvolution2d");
2822  const armnn::TensorInfo inputInfo ({ 1, 7, 7, 1 }, armnn::DataType::Float32);
2823  const armnn::TensorInfo outputInfo({ 1, 9, 9, 1 }, armnn::DataType::Float32);
2824 
2825  const armnn::TensorInfo weightsInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32, 0.0f, 0, true);
2826  const armnn::TensorInfo biasesInfo ({ 1 }, armnn::DataType::Float32, 0.0f, 0, true);
2827 
2828  std::vector<float> weightsData = GenerateRandomData<float>(weightsInfo.GetNumElements());
2829  armnn::ConstTensor weights(weightsInfo, weightsData);
2830 
2831  std::vector<float> biasesData = GenerateRandomData<float>(biasesInfo.GetNumElements());
2832  armnn::ConstTensor biases(biasesInfo, biasesData);
2833 
2834  armnn::TransposeConvolution2dDescriptor descriptor;
2835  descriptor.m_PadLeft = 1;
2836  descriptor.m_PadRight = 1;
2837  descriptor.m_PadTop = 1;
2838  descriptor.m_PadBottom = 1;
2839  descriptor.m_StrideX = 1;
2840  descriptor.m_StrideY = 1;
2841  descriptor.m_BiasEnabled = true;
2842  descriptor.m_DataLayout = armnn::DataLayout::NHWC;
2843 
2844  armnn::INetworkPtr network = armnn::INetwork::Create();
2845  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2846  armnn::IConnectableLayer* const convLayer =
2847  network->AddTransposeConvolution2dLayer(descriptor,
2848  weights,
2849  armnn::Optional<armnn::ConstTensor>(biases),
2850  layerName.c_str());
2851  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2852 
2853  inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
2854  convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2855 
2856  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2857  convLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2858 
2859  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2860  CHECK(deserializedNetwork);
2861 
2862  const std::vector<armnn::ConstTensor> constants {weights, biases};
2863  LayerVerifierBaseWithDescriptorAndConstants<armnn::TransposeConvolution2dDescriptor> verifier(
2864  layerName, {inputInfo}, {outputInfo}, descriptor, constants);
2865  deserializedNetwork->ExecuteStrategy(verifier);
2866 }
2867 
2868 TEST_CASE("SerializeDeserializeNonLinearNetwork")
2869 {
2870  class ConstantLayerVerifier : public LayerVerifierBase
2871  {
2872  public:
2873  ConstantLayerVerifier(const std::string& layerName,
2874  const std::vector<armnn::TensorInfo>& inputInfos,
2875  const std::vector<armnn::TensorInfo>& outputInfos,
2876  const armnn::ConstTensor& layerInput)
2877  : LayerVerifierBase(layerName, inputInfos, outputInfos)
2878  , m_LayerInput(layerInput) {}
2879 
2880  void ExecuteStrategy(const armnn::IConnectableLayer* layer,
2881  const armnn::BaseDescriptor& descriptor,
2882  const std::vector<armnn::ConstTensor>& constants,
2883  const char* name,
2884  const armnn::LayerBindingId id = 0) override
2885  {
2886  armnn::IgnoreUnused(descriptor, constants, id);
2887  switch (layer->GetType())
2888  {
2889  case armnn::LayerType::Input: break;
2890  case armnn::LayerType::Output: break;
2891  case armnn::LayerType::Addition: break;
2892  case armnn::LayerType::Constant:
2893  {
2894  VerifyNameAndConnections(layer, name);
2895  CompareConstTensor(constants.at(0), m_LayerInput);
2896  break;
2897  }
2898  default:
2899  {
2900  throw armnn::Exception("Unexpected layer type in test model");
2901  }
2902  }
2903  }
2904 
2905  private:
2906  armnn::ConstTensor m_LayerInput;
2907  };
2908 
2909  const std::string layerName("constant");
2910  const armnn::TensorInfo info({ 2, 3 }, armnn::DataType::Float32, 0.0f, 0, true);
2911 
2912  std::vector<float> constantData = GenerateRandomData<float>(info.GetNumElements());
2913  armnn::ConstTensor constTensor(info, constantData);
2914 
2915  armnn::INetworkPtr network = armnn::INetwork::Create();
2916  armnn::IConnectableLayer* input = network->AddInputLayer(0);
2917  armnn::IConnectableLayer* add = network->AddAdditionLayer();
2918  armnn::IConnectableLayer* constant = network->AddConstantLayer(constTensor, layerName.c_str());
2919  armnn::IConnectableLayer* output = network->AddOutputLayer(0);
2920 
2921  input->GetOutputSlot(0).Connect(add->GetInputSlot(0));
2922  constant->GetOutputSlot(0).Connect(add->GetInputSlot(1));
2923  add->GetOutputSlot(0).Connect(output->GetInputSlot(0));
2924 
2925  input->GetOutputSlot(0).SetTensorInfo(info);
2926  constant->GetOutputSlot(0).SetTensorInfo(info);
2927  add->GetOutputSlot(0).SetTensorInfo(info);
2928 
2929  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2930  CHECK(deserializedNetwork);
2931 
2932  ConstantLayerVerifier verifier(layerName, {}, {info}, constTensor);
2933  deserializedNetwork->ExecuteStrategy(verifier);
2934 }
2935 
2936 }
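
Every test case above follows the same round trip: build an armnn::INetwork, pass it through SerializeNetwork(), rebuild it with DeserializeNetwork(), and then walk the result with a LayerVerifier strategy. Those two helpers are declared in SerializerTestUtils.hpp and are not shown on this page; the sketch below is not their actual implementation, only an illustration of how such a round trip can be expressed with the public armnnSerializer::ISerializer and armnnDeserializer::IDeserializer interfaces. The *Sketch function names are hypothetical, and the exact serializer header path is an assumption.

// Minimal sketch of a serialize/deserialize round trip, assuming the public
// ISerializer/IDeserializer interfaces; not the SerializerTestUtils implementation.
#include <armnn/INetwork.hpp>
#include <armnnSerializer/ISerializer.hpp>
#include <armnnDeserializer/IDeserializer.hpp>
#include <sstream>
#include <string>
#include <vector>

std::string SerializeNetworkSketch(const armnn::INetwork& network)
{
    armnnSerializer::ISerializerPtr serializer = armnnSerializer::ISerializer::Create();
    serializer->Serialize(network);              // capture the network as a FlatBuffers blob
    std::stringstream stream;
    serializer->SaveSerializedToStream(stream);  // write the binary data to the stream
    return stream.str();
}

armnn::INetworkPtr DeserializeNetworkSketch(const std::string& serializedData)
{
    std::vector<uint8_t> binary(serializedData.begin(), serializedData.end());
    armnnDeserializer::IDeserializerPtr deserializer = armnnDeserializer::IDeserializer::Create();
    return deserializer->CreateNetworkFromBinary(binary);  // rebuild an INetwork from the blob
}

A test then only needs to compare the rebuilt graph against the original, which is what the LayerVerifierBase family of strategies does when passed to INetwork::ExecuteStrategy on the deserialized network.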