ArmNN
 22.02
SerializerTests.cpp File Reference
#include "../Serializer.hpp"
#include "SerializerTestUtils.hpp"
#include <armnn/Descriptors.hpp>
#include <armnn/INetwork.hpp>
#include <armnn/TypesUtils.hpp>
#include <armnn/LstmParams.hpp>
#include <armnn/QuantizedLstmParams.hpp>
#include <armnnDeserializer/IDeserializer.hpp>
#include <armnn/utility/IgnoreUnused.hpp>
#include <random>
#include <vector>
#include <doctest/doctest.h>

Go to the source code of this file.

Functions

 TEST_SUITE ("SerializerTests")
 

Function Documentation

◆ TEST_SUITE()

TEST_SUITE ( "SerializerTests"  )

Definition at line 24 of file SerializerTests.cpp.

References armnn::Addition, ARMNN_NO_DEPRECATE_WARN_BEGIN, ARMNN_NO_DEPRECATE_WARN_END, armnn::Average, armnn::Bilinear, armnn::Boolean, CompareConstTensor(), armnn::Concat, IOutputSlot::Connect(), armnn::Constant, INetwork::Create(), armnn::CreateDescriptorForConcatenation(), DeserializeNetwork(), armnn::Equal, armnn::Exclude, LayerVerifierBase::ExecuteStrategy(), LayerVerifierBaseWithDescriptor< Descriptor >::ExecuteStrategy(), LayerVerifierBaseWithDescriptorAndConstants< Descriptor >::ExecuteStrategy(), armnn::Float32, armnn::Floor, IConnectableLayer::GetInputSlot(), IConnectableLayer::GetOutputSlot(), TensorInfo::GetShape(), IConnectableLayer::GetType(), armnn::GetUnaryOperationAsCString(), armnn::Greater, armnn::IgnoreUnused(), armnn::info, armnn::Input, LayerVerifierBase::LayerVerifierBase(), armnn::LogicalAnd, ResizeDescriptor::m_AlignCorners, GatherDescriptor::m_Axis, MeanDescriptor::m_Axis, SoftmaxDescriptor::m_Beta, FullyConnectedDescriptor::m_BiasEnabled, Convolution2dDescriptor::m_BiasEnabled, Convolution3dDescriptor::m_BiasEnabled, DepthwiseConvolution2dDescriptor::m_BiasEnabled, TransposeConvolution2dDescriptor::m_BiasEnabled, SpaceToDepthDescriptor::m_BlockSize, FullyConnectedDescriptor::m_ConstantWeights, Pooling2dDescriptor::m_DataLayout, Pooling3dDescriptor::m_DataLayout, Convolution2dDescriptor::m_DataLayout, Convolution3dDescriptor::m_DataLayout, DepthwiseConvolution2dDescriptor::m_DataLayout, NormalizationDescriptor::m_DataLayout, L2NormalizationDescriptor::m_DataLayout, BatchToSpaceNdDescriptor::m_DataLayout, ResizeDescriptor::m_DataLayout, SpaceToBatchNdDescriptor::m_DataLayout, TransposeConvolution2dDescriptor::m_DataLayout, DetectionPostProcessDescriptor::m_DetectionsPerClass, Convolution2dDescriptor::m_DilationX, Convolution3dDescriptor::m_DilationX, DepthwiseConvolution2dDescriptor::m_DilationX, Convolution2dDescriptor::m_DilationY, Convolution3dDescriptor::m_DilationY, DepthwiseConvolution2dDescriptor::m_DilationY, 
Convolution3dDescriptor::m_DilationZ, StridedSliceDescriptor::m_EndMask, BatchNormalizationDescriptor::m_Eps, ArgMinMaxDescriptor::m_Function, InstanceNormalizationDescriptor::m_Gamma, ResizeDescriptor::m_HalfPixelCenters, DetectionPostProcessDescriptor::m_MaxClassesPerDetection, DetectionPostProcessDescriptor::m_MaxDetections, ResizeDescriptor::m_Method, DetectionPostProcessDescriptor::m_NmsIouThreshold, DetectionPostProcessDescriptor::m_NmsScoreThreshold, DetectionPostProcessDescriptor::m_NumClasses, Convolution3dDescriptor::m_PadBack, Convolution2dDescriptor::m_PadBottom, Convolution3dDescriptor::m_PadBottom, DepthwiseConvolution2dDescriptor::m_PadBottom, TransposeConvolution2dDescriptor::m_PadBottom, PadDescriptor::m_PaddingMode, Convolution3dDescriptor::m_PadFront, Convolution2dDescriptor::m_PadLeft, Convolution3dDescriptor::m_PadLeft, DepthwiseConvolution2dDescriptor::m_PadLeft, TransposeConvolution2dDescriptor::m_PadLeft, Convolution2dDescriptor::m_PadRight, Convolution3dDescriptor::m_PadRight, DepthwiseConvolution2dDescriptor::m_PadRight, TransposeConvolution2dDescriptor::m_PadRight, Convolution2dDescriptor::m_PadTop, Convolution3dDescriptor::m_PadTop, DepthwiseConvolution2dDescriptor::m_PadTop, TransposeConvolution2dDescriptor::m_PadTop, DetectionPostProcessDescriptor::m_ScaleH, DetectionPostProcessDescriptor::m_ScaleW, DetectionPostProcessDescriptor::m_ScaleX, DetectionPostProcessDescriptor::m_ScaleY, Convolution2dDescriptor::m_StrideX, Convolution3dDescriptor::m_StrideX, DepthwiseConvolution2dDescriptor::m_StrideX, TransposeConvolution2dDescriptor::m_StrideX, Convolution2dDescriptor::m_StrideY, Convolution3dDescriptor::m_StrideY, DepthwiseConvolution2dDescriptor::m_StrideY, TransposeConvolution2dDescriptor::m_StrideY, Convolution3dDescriptor::m_StrideZ, ResizeDescriptor::m_TargetHeight, ResizeDescriptor::m_TargetWidth, FullyConnectedDescriptor::m_TransposeWeightMatrix, DetectionPostProcessDescriptor::m_UseRegularNms, ReduceDescriptor::m_vAxis, 
armnn::Max, armnn::Merge, armnn::NCHW, armnn::NDHWC, armnn::NearestNeighbor, armnn::NHWC, armnn::NotEqual, armnn::Output, armnn::QAsymmU8, armnn::Reflect, armnn::Resize, SerializeNetwork(), IOutputSlot::SetTensorInfo(), ViewsDescriptor::SetViewOriginCoord(), ViewsDescriptor::SetViewSize(), armnn::Signed32, armnn::Signed64, armnn::Sum, armnn::Switch, and LayerVerifierBase::VerifyNameAndConnections().

25 {
26 
// Round-trips a two-input Addition layer through the ArmNN serializer and
// checks that the layer name, connections and tensor infos survive.
// NOTE(review): Doxygen listing — source line 32 was dropped (hyperlinked);
// presumably `armnn::INetworkPtr network = armnn::INetwork::Create();` — confirm.
27 TEST_CASE("SerializeAddition")
28 {
29  const std::string layerName("addition");
30  const armnn::TensorInfo tensorInfo({1, 2, 3}, armnn::DataType::Float32);
31 
// [dropped line 32: network creation — see note above]
33  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
34  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
35  armnn::IConnectableLayer* const additionLayer = network->AddAdditionLayer(layerName.c_str());
36  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
37 
38  inputLayer0->GetOutputSlot(0).Connect(additionLayer->GetInputSlot(0));
39  inputLayer1->GetOutputSlot(0).Connect(additionLayer->GetInputSlot(1));
40  additionLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
41 
42  inputLayer0->GetOutputSlot(0).SetTensorInfo(tensorInfo);
43  inputLayer1->GetOutputSlot(0).SetTensorInfo(tensorInfo);
44  additionLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
45 
46  std::string serializedNetwork = SerializeNetwork(*network);
47  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(serializedNetwork);
48  CHECK(deserializedNetwork);
49 
// Verifier walks the deserialized graph and asserts name + I/O tensor infos.
50  LayerVerifierBase verifier(layerName, {tensorInfo, tensorInfo}, {tensorInfo});
51  deserializedNetwork->ExecuteStrategy(verifier);
52 }
53 
// Helper shared by the ArgMinMax test cases below: builds a one-layer
// ArgMinMax network with the given output data type, round-trips it, and
// verifies the deserialized layer and its descriptor.
// NOTE(review): Doxygen listing — hyperlinked source lines 61, 64 and 78 were
// dropped: 61 presumably sets `descriptor.m_Function`, 64 is the network
// creation, 78 is the start of the verifier declaration
// (`LayerVerifierBaseWithDescriptor<armnn::ArgMinMaxDescriptor> verifier(layerName,`).
// Confirm against the original SerializerTests.cpp.
54 void SerializeArgMinMaxTest(armnn::DataType dataType)
55 {
56  const std::string layerName("argminmax");
57  const armnn::TensorInfo inputInfo({1, 2, 3}, armnn::DataType::Float32);
58  const armnn::TensorInfo outputInfo({1, 3}, dataType);
59 
60  armnn::ArgMinMaxDescriptor descriptor;
// [dropped line 61: descriptor.m_Function assignment — see note above]
62  descriptor.m_Axis = 1;
63 
// [dropped line 64: network creation]
65  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
66  armnn::IConnectableLayer* const argMinMaxLayer = network->AddArgMinMaxLayer(descriptor, layerName.c_str());
67  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
68 
69  inputLayer->GetOutputSlot(0).Connect(argMinMaxLayer->GetInputSlot(0));
70  argMinMaxLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
71 
72  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
73  argMinMaxLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
74 
75  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
76  CHECK(deserializedNetwork);
77 
// [dropped line 78: verifier declaration — see note above]
79  {inputInfo},
80  {outputInfo},
81  descriptor);
82  deserializedNetwork->ExecuteStrategy(verifier);
83 }
84 
// ArgMinMax round-trip with a Signed32 output tensor.
85 TEST_CASE("SerializeArgMinMaxSigned32")
86 {
87  SerializeArgMinMaxTest(armnn::DataType::Signed32);
88 }
89 
// ArgMinMax round-trip with a Signed64 output tensor.
90 TEST_CASE("SerializeArgMinMaxSigned64")
91 {
92  SerializeArgMinMaxTest(armnn::DataType::Signed64);
93 }
94 
// Round-trips a BatchNormalization layer, including its four constant
// tensors (mean, variance, beta, gamma), and verifies descriptor + constants.
// NOTE(review): Doxygen listing — dropped hyperlinked lines: 106 presumably
// `armnn::BatchNormalizationDescriptor descriptor;`, 121 the network creation,
// 141 the start of the
// `LayerVerifierBaseWithDescriptorAndConstants<armnn::BatchNormalizationDescriptor>`
// verifier declaration. Confirm against the original file.
95 TEST_CASE("SerializeBatchNormalization")
96 {
97  const std::string layerName("batchNormalization");
98  const armnn::TensorInfo inputInfo ({ 1, 3, 3, 1 }, armnn::DataType::Float32);
99  const armnn::TensorInfo outputInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32);
100 
// Constant-tensor infos are marked `true` (IsConstant) in the last ctor arg.
101  const armnn::TensorInfo meanInfo({1}, armnn::DataType::Float32, 0.0f, 0, true);
102  const armnn::TensorInfo varianceInfo({1}, armnn::DataType::Float32, 0.0f, 0, true);
103  const armnn::TensorInfo betaInfo({1}, armnn::DataType::Float32, 0.0f, 0, true);
104  const armnn::TensorInfo gammaInfo({1}, armnn::DataType::Float32, 0.0f, 0, true);
105 
// [dropped line 106: descriptor declaration — see note above]
107  descriptor.m_Eps = 0.0010000000475f;
108  descriptor.m_DataLayout = armnn::DataLayout::NHWC;
109 
110  std::vector<float> meanData({5.0});
111  std::vector<float> varianceData({2.0});
112  std::vector<float> betaData({1.0});
113  std::vector<float> gammaData({0.0});
114 
115  std::vector<armnn::ConstTensor> constants;
116  constants.emplace_back(armnn::ConstTensor(meanInfo, meanData));
117  constants.emplace_back(armnn::ConstTensor(varianceInfo, varianceData));
118  constants.emplace_back(armnn::ConstTensor(betaInfo, betaData));
119  constants.emplace_back(armnn::ConstTensor(gammaInfo, gammaData));
120 
// [dropped line 121: network creation]
122  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
123  armnn::IConnectableLayer* const batchNormalizationLayer =
124  network->AddBatchNormalizationLayer(descriptor,
125  constants[0],
126  constants[1],
127  constants[2],
128  constants[3],
129  layerName.c_str());
130  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
131 
132  inputLayer->GetOutputSlot(0).Connect(batchNormalizationLayer->GetInputSlot(0));
133  batchNormalizationLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
134 
135  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
136  batchNormalizationLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
137 
138  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
139  CHECK(deserializedNetwork);
140 
// [dropped line 141: verifier declaration — see note above]
142  layerName, {inputInfo}, {outputInfo}, descriptor, constants);
143  deserializedNetwork->ExecuteStrategy(verifier);
144 }
145 
// Round-trips a BatchToSpaceNd layer and verifies its descriptor.
// NOTE(review): the layerName string "spaceToBatchNd" looks like a copy-paste
// from the SpaceToBatchNd test — harmless (the same string is used on both
// sides of the round-trip) but misleading; consider renaming upstream.
// Doxygen listing — dropped hyperlinked lines: 152-153 presumably the blank
// line plus `armnn::BatchToSpaceNdDescriptor desc;`, 157 the network creation,
// 171 the start of the
// `LayerVerifierBaseWithDescriptor<armnn::BatchToSpaceNdDescriptor>` verifier
// declaration. Confirm against the original file.
146 TEST_CASE("SerializeBatchToSpaceNd")
147 {
148  const std::string layerName("spaceToBatchNd");
149  const armnn::TensorInfo inputInfo({4, 1, 2, 2}, armnn::DataType::Float32);
150  const armnn::TensorInfo outputInfo({1, 1, 4, 4}, armnn::DataType::Float32);
151 
// [dropped lines 152-153: descriptor declaration — see note above]
154  desc.m_BlockShape = {2, 2};
155  desc.m_Crops = {{0, 0}, {0, 0}};
156 
// [dropped line 157: network creation]
158  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
159  armnn::IConnectableLayer* const batchToSpaceNdLayer = network->AddBatchToSpaceNdLayer(desc, layerName.c_str());
160  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
161 
162  inputLayer->GetOutputSlot(0).Connect(batchToSpaceNdLayer->GetInputSlot(0));
163  batchToSpaceNdLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
164 
165  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
166  batchToSpaceNdLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
167 
168  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
169  CHECK(deserializedNetwork);
170 
// [dropped line 171: verifier declaration — see note above]
172  {inputInfo},
173  {outputInfo},
174  desc);
175  deserializedNetwork->ExecuteStrategy(verifier);
176 }
177 
// Round-trips a Cast layer (no descriptor) and verifies name + tensor infos.
// NOTE(review): Doxygen listing — dropped hyperlinked lines: 184-185
// presumably declare `inputInfo` and `outputInfo` from `shape` with different
// data types (the point of a cast), 187 the network creation. Confirm against
// the original file.
178 TEST_CASE("SerializeCast")
179 {
180  const std::string layerName("cast");
181 
182  const armnn::TensorShape shape{1, 5, 2, 3};
183 
// [dropped lines 184-185: inputInfo / outputInfo declarations — see note above]
186 
// [dropped line 187: network creation]
188  armnn::IConnectableLayer* inputLayer = network->AddInputLayer(0);
189  armnn::IConnectableLayer* castLayer = network->AddCastLayer(layerName.c_str());
190  armnn::IConnectableLayer* outputLayer = network->AddOutputLayer(0);
191 
192  inputLayer->GetOutputSlot(0).Connect(castLayer->GetInputSlot(0));
193  castLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
194 
195  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
196  castLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
197 
198  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
199  CHECK(deserializedNetwork);
200 
201  LayerVerifierBase verifier(layerName, {inputInfo}, {outputInfo});
202  deserializedNetwork->ExecuteStrategy(verifier);
203 }
204 
// Round-trips a ChannelShuffle layer and verifies its descriptor
// (constructed here with {groups=3, axis=1} — see ChannelShuffleDescriptor).
// NOTE(review): Doxygen listing — dropped hyperlinked lines: 213 the network
// creation, 228 the start of the
// `LayerVerifierBaseWithDescriptor<armnn::ChannelShuffleDescriptor>` verifier
// declaration. Confirm against the original file.
205 TEST_CASE("SerializeChannelShuffle")
206 {
207  const std::string layerName("channelShuffle");
208  const armnn::TensorInfo inputInfo({1, 9}, armnn::DataType::Float32);
209  const armnn::TensorInfo outputInfo({1, 9}, armnn::DataType::Float32);
210 
211  armnn::ChannelShuffleDescriptor descriptor({3, 1});
212 
// [dropped line 213: network creation]
214  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
215  armnn::IConnectableLayer* const ChannelShuffleLayer =
216  network->AddChannelShuffleLayer(descriptor, layerName.c_str());
217  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
218 
219  inputLayer->GetOutputSlot(0).Connect(ChannelShuffleLayer->GetInputSlot(0));
220  ChannelShuffleLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
221 
222  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
223  ChannelShuffleLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
224 
225  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
226  CHECK(deserializedNetwork);
227 
// [dropped line 228: verifier declaration — see note above]
229  layerName, {inputInfo}, {outputInfo}, descriptor);
230  deserializedNetwork->ExecuteStrategy(verifier);
231 }
232 
// Round-trips a two-input Comparison layer and verifies its descriptor.
// NOTE(review): Doxygen listing — dropped hyperlinked lines: 239-240
// presumably declare `inputInfo` (Float32) and `outputInfo` (Boolean) from
// `shape`; 242 presumably constructs the `armnn::ComparisonDescriptor`
// (the page's cross-references mention armnn::NotEqual); 244 is the network
// creation; 261 is the start of the
// `LayerVerifierBaseWithDescriptor<armnn::ComparisonDescriptor>` verifier
// declaration. Confirm against the original file.
233 TEST_CASE("SerializeComparison")
234 {
235  const std::string layerName("comparison");
236 
237  const armnn::TensorShape shape{2, 1, 2, 4};
238 
// [dropped lines 239-240: inputInfo / outputInfo declarations — see note above]
241 
// [dropped line 242: comparison descriptor construction]
243 
// [dropped line 244: network creation]
245  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
246  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
247  armnn::IConnectableLayer* const comparisonLayer = network->AddComparisonLayer(descriptor, layerName.c_str());
248  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
249 
250  inputLayer0->GetOutputSlot(0).Connect(comparisonLayer->GetInputSlot(0));
251  inputLayer1->GetOutputSlot(0).Connect(comparisonLayer->GetInputSlot(1));
252  comparisonLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
253 
254  inputLayer0->GetOutputSlot(0).SetTensorInfo(inputInfo);
255  inputLayer1->GetOutputSlot(0).SetTensorInfo(inputInfo);
256  comparisonLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
257 
258  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
259  CHECK(deserializedNetwork);
260 
// [dropped line 261: verifier declaration — see note above]
262  { inputInfo, inputInfo },
263  { outputInfo },
264  descriptor);
265  deserializedNetwork->ExecuteStrategy(verifier);
266 }
267 
// Round-trips a Constant layer feeding an Addition and verifies that the
// constant tensor's data survives serialization. Uses a local verifier
// subclass because the base verifier does not compare constant payloads.
// NOTE(review): Doxygen listing — dropped hyperlinked line 315 is presumably
// the network creation (`armnn::INetworkPtr network = armnn::INetwork::Create();`).
// Confirm against the original file.
268 TEST_CASE("SerializeConstant")
269 {
// Custom verifier: skips Input/Output/Addition layers and, for the Constant
// layer, checks name/connections and byte-compares each constant tensor
// against the expected ones captured at construction.
270  class ConstantLayerVerifier : public LayerVerifierBase
271  {
272  public:
273  ConstantLayerVerifier(const std::string& layerName,
274  const std::vector<armnn::TensorInfo>& inputInfos,
275  const std::vector<armnn::TensorInfo>& outputInfos,
276  const std::vector<armnn::ConstTensor>& constants)
277  : LayerVerifierBase(layerName, inputInfos, outputInfos)
278  , m_Constants(constants) {}
279 
280  void ExecuteStrategy(const armnn::IConnectableLayer* layer,
281  const armnn::BaseDescriptor& descriptor,
282  const std::vector<armnn::ConstTensor>& constants,
283  const char* name,
284  const armnn::LayerBindingId id = 0) override
285  {
286  armnn::IgnoreUnused(descriptor, id);
287 
288  switch (layer->GetType())
289  {
// Input/Output/Addition layers are not the layer under test — ignore them.
290  case armnn::LayerType::Input: break;
291  case armnn::LayerType::Output: break;
292  case armnn::LayerType::Addition: break;
293  default:
294  {
295  this->VerifyNameAndConnections(layer, name);
296 
297  for (std::size_t i = 0; i < constants.size(); i++)
298  {
299  CompareConstTensor(constants[i], m_Constants[i]);
300  }
301  }
302  }
303  }
304 
305  private:
306  const std::vector<armnn::ConstTensor> m_Constants;
307  };
308 
309  const std::string layerName("constant");
310  const armnn::TensorInfo info({ 2, 3 }, armnn::DataType::Float32, 0.0f, 0, true);
311 
312  std::vector<float> constantData = GenerateRandomData<float>(info.GetNumElements());
313  armnn::ConstTensor constTensor(info, constantData);
314 
// [dropped line 315: network creation — see note above]
316  armnn::IConnectableLayer* input = network->AddInputLayer(0);
317  armnn::IConnectableLayer* constant = network->AddConstantLayer(constTensor, layerName.c_str());
318  armnn::IConnectableLayer* add = network->AddAdditionLayer();
319  armnn::IConnectableLayer* output = network->AddOutputLayer(0);
320 
321  input->GetOutputSlot(0).Connect(add->GetInputSlot(0));
322  constant->GetOutputSlot(0).Connect(add->GetInputSlot(1));
323  add->GetOutputSlot(0).Connect(output->GetInputSlot(0));
324 
325  input->GetOutputSlot(0).SetTensorInfo(info);
326  constant->GetOutputSlot(0).SetTensorInfo(info);
327  add->GetOutputSlot(0).SetTensorInfo(info);
328 
329  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
330  CHECK(deserializedNetwork);
331 
332  ConstantLayerVerifier verifier(layerName, {}, {info}, {constTensor});
333  deserializedNetwork->ExecuteStrategy(verifier);
334 }
335 
// Round-trips a Convolution2d layer (deprecated weights-in-layer API) and
// verifies descriptor plus weight/bias constants.
// NOTE(review): Doxygen listing — dropped hyperlinked lines: 351 presumably
// `armnn::Convolution2dDescriptor descriptor;`; 361/363 the
// ARMNN_NO_DEPRECATE_WARN_BEGIN marker and network creation; 368 presumably
// the `armnn::Optional<armnn::ConstTensor>(biases)` argument; 382 the start of
// the `LayerVerifierBaseWithDescriptorAndConstants<armnn::Convolution2dDescriptor>`
// verifier declaration. Confirm against the original file.
336 TEST_CASE("SerializeConvolution2d")
337 {
338  const std::string layerName("convolution2d");
339  const armnn::TensorInfo inputInfo ({ 1, 5, 5, 1 }, armnn::DataType::Float32);
340  const armnn::TensorInfo outputInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32);
341 
342  const armnn::TensorInfo weightsInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32, 0.0f, 0, true);
343  const armnn::TensorInfo biasesInfo ({ 1 }, armnn::DataType::Float32, 0.0f, 0, true);
344 
345  std::vector<float> weightsData = GenerateRandomData<float>(weightsInfo.GetNumElements());
346  armnn::ConstTensor weights(weightsInfo, weightsData);
347 
348  std::vector<float> biasesData = GenerateRandomData<float>(biasesInfo.GetNumElements());
349  armnn::ConstTensor biases(biasesInfo, biasesData);
350 
// [dropped line 351: descriptor declaration — see note above]
352  descriptor.m_PadLeft = 1;
353  descriptor.m_PadRight = 1;
354  descriptor.m_PadTop = 1;
355  descriptor.m_PadBottom = 1;
356  descriptor.m_StrideX = 2;
357  descriptor.m_StrideY = 2;
358  descriptor.m_DilationX = 2;
359  descriptor.m_DilationY = 2;
360  descriptor.m_BiasEnabled = true;
// [dropped lines 361/363: deprecation-warning macro and network creation]
362 
364  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
365  armnn::IConnectableLayer* const convLayer =
366  network->AddConvolution2dLayer(descriptor,
367  weights,
// [dropped line 368: optional biases argument — see note above]
369  layerName.c_str());
370  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
371 
372  inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
373  convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
374 
375  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
376  convLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
377 
378  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
379  CHECK(deserializedNetwork);
380 
381  const std::vector<armnn::ConstTensor>& constants {weights, biases};
// [dropped line 382: verifier declaration — see note above]
383  layerName, {inputInfo}, {outputInfo}, descriptor, constants);
384  deserializedNetwork->ExecuteStrategy(verifier);
385 }
386 
// Round-trips a Convolution2d layer whose weights/biases use per-axis
// (per-channel) quantization scales, verifying the per-axis quant info
// survives serialization.
// NOTE(review): Doxygen listing — dropped hyperlinked lines: 416/418 are
// presumably the ARMNN_NO_DEPRECATE_WARN_BEGIN marker and the network
// creation; 423 the `armnn::Optional<armnn::ConstTensor>(biases)` argument;
// 437 the start of the
// `LayerVerifierBaseWithDescriptorAndConstants<Convolution2dDescriptor>`
// verifier declaration. Confirm against the original file.
387 TEST_CASE("SerializeConvolution2dWithPerAxisParams")
388 {
389  using namespace armnn;
390 
391  const std::string layerName("convolution2dWithPerAxis");
392  const TensorInfo inputInfo ({ 1, 3, 1, 2 }, DataType::QAsymmU8, 0.55f, 128);
393  const TensorInfo outputInfo({ 1, 3, 1, 3 }, DataType::QAsymmU8, 0.75f, 128);
394 
// Per-output-channel quantization: one scale per channel along dimension 0.
395  const std::vector<float> quantScales{ 0.75f, 0.65f, 0.85f };
396  constexpr unsigned int quantDimension = 0;
397 
398  const TensorInfo kernelInfo({ 3, 1, 1, 2 }, DataType::QSymmS8, quantScales, quantDimension, true);
399 
400  const std::vector<float> biasQuantScales{ 0.25f, 0.50f, 0.75f };
401  const TensorInfo biasInfo({ 3 }, DataType::Signed32, biasQuantScales, quantDimension, true);
402 
403  std::vector<int8_t> kernelData = GenerateRandomData<int8_t>(kernelInfo.GetNumElements());
404  armnn::ConstTensor weights(kernelInfo, kernelData);
405  std::vector<int32_t> biasData = GenerateRandomData<int32_t>(biasInfo.GetNumElements());
406  armnn::ConstTensor biases(biasInfo, biasData);
407 
408  Convolution2dDescriptor descriptor;
409  descriptor.m_StrideX = 1;
410  descriptor.m_StrideY = 1;
411  descriptor.m_PadLeft = 0;
412  descriptor.m_PadRight = 0;
413  descriptor.m_PadTop = 0;
414  descriptor.m_PadBottom = 0;
415  descriptor.m_BiasEnabled = true;
// [dropped lines 416/418: deprecation-warning macro and network creation]
417 
419  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
420  armnn::IConnectableLayer* const convLayer =
421  network->AddConvolution2dLayer(descriptor,
422  weights,
// [dropped line 423: optional biases argument — see note above]
424  layerName.c_str());
425  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
426 
427  inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
428  convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
429 
430  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
431  convLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
432 
433  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
434  CHECK(deserializedNetwork);
435 
436  const std::vector<armnn::ConstTensor>& constants {weights, biases};
// [dropped line 437: verifier declaration — see note above]
438  layerName, {inputInfo}, {outputInfo}, descriptor, constants);
439  deserializedNetwork->ExecuteStrategy(verifier);
440 }
441 
// Round-trips a Convolution3d layer using the modern API where weights and
// biases are separate Constant layers connected to input slots 1 and 2.
// NOTE(review): Doxygen listing — dropped hyperlinked lines: 457 presumably
// `armnn::Convolution3dDescriptor descriptor;`; 473 the network creation;
// 493 the start of the
// `LayerVerifierBaseWithDescriptor<armnn::Convolution3dDescriptor>` verifier
// declaration. Confirm against the original file.
442 TEST_CASE("SerializeConvolution3d")
443 {
444  const std::string layerName("convolution3d");
445  const armnn::TensorInfo inputInfo ({ 1, 5, 5, 5, 1 }, armnn::DataType::Float32);
446  const armnn::TensorInfo outputInfo({ 1, 2, 2, 2, 1 }, armnn::DataType::Float32);
447 
448  const armnn::TensorInfo weightsInfo({ 3, 3, 3, 1, 1 }, armnn::DataType::Float32, 0.0f, 0, true);
449  const armnn::TensorInfo biasesInfo ({ 1 }, armnn::DataType::Float32, 0.0f, 0, true);
450 
451  std::vector<float> weightsData = GenerateRandomData<float>(weightsInfo.GetNumElements());
452  armnn::ConstTensor weights(weightsInfo, weightsData);
453 
454  std::vector<float> biasesData = GenerateRandomData<float>(biasesInfo.GetNumElements());
455  armnn::ConstTensor biases(biasesInfo, biasesData);
456 
// [dropped line 457: descriptor declaration — see note above]
458  descriptor.m_PadLeft = 0;
459  descriptor.m_PadRight = 0;
460  descriptor.m_PadTop = 0;
461  descriptor.m_PadBottom = 0;
462  descriptor.m_PadFront = 0;
463  descriptor.m_PadBack = 0;
464  descriptor.m_DilationX = 1;
465  descriptor.m_DilationY = 1;
466  descriptor.m_DilationZ = 1;
467  descriptor.m_StrideX = 2;
468  descriptor.m_StrideY = 2;
469  descriptor.m_StrideZ = 2;
470  descriptor.m_BiasEnabled = true;
// [dropped line 473: network creation]
472 
474  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
475  armnn::IConnectableLayer* const weightsLayer = network->AddConstantLayer(weights, "Weights");
476  armnn::IConnectableLayer* const biasesLayer = network->AddConstantLayer(biases, "Biases");
477  armnn::IConnectableLayer* const convLayer = network->AddConvolution3dLayer(descriptor, layerName.c_str());
478  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
479 
// Weights and biases feed the convolution as inputs 1 and 2 (constant-layer API).
480  inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
481  weightsLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(1));
482  biasesLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(2));
483  convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
484 
485  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
486  weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsInfo);
487  biasesLayer->GetOutputSlot(0).SetTensorInfo(biasesInfo);
488  convLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
489 
490  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
491  CHECK(deserializedNetwork);
492 
// [dropped line 493: verifier declaration — see note above]
494  layerName, {inputInfo, weightsInfo, biasesInfo}, {outputInfo}, descriptor);
495  deserializedNetwork->ExecuteStrategy(verifier);
496 }
497 
// Round-trips a DepthToSpace layer and verifies its descriptor.
// NOTE(review): Doxygen listing — dropped hyperlinked lines: 505 presumably
// `armnn::DepthToSpaceDescriptor desc;`, 509 the network creation. Confirm
// against the original file.
498 TEST_CASE("SerializeDepthToSpace")
499 {
500  const std::string layerName("depthToSpace");
501 
502  const armnn::TensorInfo inputInfo ({ 1, 8, 4, 12 }, armnn::DataType::Float32);
503  const armnn::TensorInfo outputInfo({ 1, 16, 8, 3 }, armnn::DataType::Float32);
504 
// [dropped line 505: descriptor declaration — see note above]
506  desc.m_BlockSize = 2;
507  desc.m_DataLayout = armnn::DataLayout::NHWC;
508 
// [dropped line 509: network creation]
510  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
511  armnn::IConnectableLayer* const depthToSpaceLayer = network->AddDepthToSpaceLayer(desc, layerName.c_str());
512  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
513 
514  inputLayer->GetOutputSlot(0).Connect(depthToSpaceLayer->GetInputSlot(0));
515  depthToSpaceLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
516 
517  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
518  depthToSpaceLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
519 
520  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
521  CHECK(deserializedNetwork);
522 
523  LayerVerifierBaseWithDescriptor<armnn::DepthToSpaceDescriptor> verifier(layerName, {inputInfo}, {outputInfo}, desc);
524  deserializedNetwork->ExecuteStrategy(verifier);
525 }
526 
// Round-trips a DepthwiseConvolution2d layer (deprecated weights-in-layer
// API) and verifies descriptor plus weight/bias constants.
// NOTE(review): layerName "depwiseConvolution2d" contains a typo ("depwise");
// harmless since the same string is used on both sides of the round-trip.
// Doxygen listing — dropped hyperlinked lines: 542 presumably
// `armnn::DepthwiseConvolution2dDescriptor descriptor;`; 553/554 presumably
// the ARMNN_NO_DEPRECATE_WARN_BEGIN marker and network creation; 559 the
// `armnn::Optional<armnn::ConstTensor>(biases)` argument; 573 the start of
// the verifier declaration. Confirm against the original file.
// NOTE(review): biasesData is int32_t while biasesInfo is Float32 — looks
// inconsistent; verify against the original source.
527 TEST_CASE("SerializeDepthwiseConvolution2d")
528 {
529  const std::string layerName("depwiseConvolution2d");
530  const armnn::TensorInfo inputInfo ({ 1, 5, 5, 3 }, armnn::DataType::Float32);
531  const armnn::TensorInfo outputInfo({ 1, 3, 3, 3 }, armnn::DataType::Float32);
532 
533  const armnn::TensorInfo weightsInfo({ 1, 3, 3, 3 }, armnn::DataType::Float32, 0.0f, 0, true);
534  const armnn::TensorInfo biasesInfo ({ 3 }, armnn::DataType::Float32, 0.0f, 0, true);
535 
536  std::vector<float> weightsData = GenerateRandomData<float>(weightsInfo.GetNumElements());
537  armnn::ConstTensor weights(weightsInfo, weightsData);
538 
539  std::vector<int32_t> biasesData = GenerateRandomData<int32_t>(biasesInfo.GetNumElements());
540  armnn::ConstTensor biases(biasesInfo, biasesData);
541 
// [dropped line 542: descriptor declaration — see note above]
543  descriptor.m_PadLeft = 1;
544  descriptor.m_PadRight = 1;
545  descriptor.m_PadTop = 1;
546  descriptor.m_PadBottom = 1;
547  descriptor.m_StrideX = 2;
548  descriptor.m_StrideY = 2;
549  descriptor.m_DilationX = 2;
550  descriptor.m_DilationY = 2;
551  descriptor.m_BiasEnabled = true;
// [dropped lines 553-554: deprecation-warning macro and network creation]
553 
555  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
556  armnn::IConnectableLayer* const depthwiseConvLayer =
557  network->AddDepthwiseConvolution2dLayer(descriptor,
558  weights,
// [dropped line 559: optional biases argument — see note above]
560  layerName.c_str());
561  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
562 
563  inputLayer->GetOutputSlot(0).Connect(depthwiseConvLayer->GetInputSlot(0));
564  depthwiseConvLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
565 
566  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
567  depthwiseConvLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
568 
569  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
570  CHECK(deserializedNetwork);
571 
572  const std::vector<armnn::ConstTensor>& constants {weights, biases};
// [dropped line 573: verifier declaration — see note above]
574  layerName, {inputInfo}, {outputInfo}, descriptor, constants);
575  deserializedNetwork->ExecuteStrategy(verifier);
576 }
577 
// Round-trips a DepthwiseConvolution2d layer whose weights/biases carry
// per-axis (per-channel) quantization scales.
// NOTE(review): Doxygen listing — dropped hyperlinked lines: 599 presumably
// `DepthwiseConvolution2dDescriptor descriptor;`; 610/611 presumably the
// ARMNN_NO_DEPRECATE_WARN_BEGIN marker and network creation; 616 the
// `armnn::Optional<armnn::ConstTensor>(biases)` argument; 630 the start of
// the verifier declaration. Confirm against the original file.
578 TEST_CASE("SerializeDepthwiseConvolution2dWithPerAxisParams")
579 {
580  using namespace armnn;
581 
582  const std::string layerName("depwiseConvolution2dWithPerAxis");
583  const TensorInfo inputInfo ({ 1, 3, 3, 2 }, DataType::QAsymmU8, 0.55f, 128);
584  const TensorInfo outputInfo({ 1, 2, 2, 4 }, DataType::QAsymmU8, 0.75f, 128);
585 
// One quantization scale per output channel, along dimension 0.
586  const std::vector<float> quantScales{ 0.75f, 0.80f, 0.90f, 0.95f };
587  const unsigned int quantDimension = 0;
588  TensorInfo kernelInfo({ 2, 2, 2, 2 }, DataType::QSymmS8, quantScales, quantDimension, true);
589 
590  const std::vector<float> biasQuantScales{ 0.25f, 0.35f, 0.45f, 0.55f };
591  constexpr unsigned int biasQuantDimension = 0;
592  TensorInfo biasInfo({ 4 }, DataType::Signed32, biasQuantScales, biasQuantDimension, true);
593 
594  std::vector<int8_t> kernelData = GenerateRandomData<int8_t>(kernelInfo.GetNumElements());
595  armnn::ConstTensor weights(kernelInfo, kernelData);
596  std::vector<int32_t> biasData = GenerateRandomData<int32_t>(biasInfo.GetNumElements());
597  armnn::ConstTensor biases(biasInfo, biasData);
598 
// [dropped line 599: descriptor declaration — see note above]
600  descriptor.m_StrideX = 1;
601  descriptor.m_StrideY = 1;
602  descriptor.m_PadLeft = 0;
603  descriptor.m_PadRight = 0;
604  descriptor.m_PadTop = 0;
605  descriptor.m_PadBottom = 0;
606  descriptor.m_DilationX = 1;
607  descriptor.m_DilationY = 1;
608  descriptor.m_BiasEnabled = true;
// [dropped lines 610-611: deprecation-warning macro and network creation]
610 
612  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
613  armnn::IConnectableLayer* const depthwiseConvLayer =
614  network->AddDepthwiseConvolution2dLayer(descriptor,
615  weights,
// [dropped line 616: optional biases argument — see note above]
617  layerName.c_str());
618  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
619 
620  inputLayer->GetOutputSlot(0).Connect(depthwiseConvLayer->GetInputSlot(0));
621  depthwiseConvLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
622 
623  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
624  depthwiseConvLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
625 
626  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
627  CHECK(deserializedNetwork);
628 
629  const std::vector<armnn::ConstTensor>& constants {weights, biases};
// [dropped line 630: verifier declaration — see note above]
631  layerName, {inputInfo}, {outputInfo}, descriptor, constants);
632  deserializedNetwork->ExecuteStrategy(verifier);
633 }
634 
// Round-trips a Dequantize layer (QAsymmU8 -> Float32, no descriptor).
// NOTE(review): Doxygen listing — dropped hyperlinked line 641 is presumably
// the network creation (`armnn::INetworkPtr network = armnn::INetwork::Create();`).
// Confirm against the original file.
635 TEST_CASE("SerializeDequantize")
636 {
637  const std::string layerName("dequantize");
638  const armnn::TensorInfo inputInfo({ 1, 5, 2, 3 }, armnn::DataType::QAsymmU8, 0.5f, 1);
639  const armnn::TensorInfo outputInfo({ 1, 5, 2, 3 }, armnn::DataType::Float32);
640 
// [dropped line 641: network creation — see note above]
642  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
643  armnn::IConnectableLayer* const dequantizeLayer = network->AddDequantizeLayer(layerName.c_str());
644  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
645 
646  inputLayer->GetOutputSlot(0).Connect(dequantizeLayer->GetInputSlot(0));
647  dequantizeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
648 
649  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
650  dequantizeLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
651 
652  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
653  CHECK(deserializedNetwork);
654 
655  LayerVerifierBase verifier(layerName, {inputInfo}, {outputInfo});
656  deserializedNetwork->ExecuteStrategy(verifier);
657 }
658 
// Round-trips a DetectionPostProcess layer (2 inputs: box encodings and
// scores; 4 outputs: boxes, classes, scores, num detections) with its anchors
// constant tensor, and verifies descriptor + anchors.
// NOTE(review): Doxygen listing — dropped hyperlinked lines: 664-665 are the
// two input TensorInfo entries, 669-672 the four output TensorInfo entries,
// 675 presumably `armnn::DetectionPostProcessDescriptor descriptor;`, 699 the
// network creation, 721 the start of the
// `LayerVerifierBaseWithDescriptorAndConstants<armnn::DetectionPostProcessDescriptor>`
// verifier declaration. Confirm against the original file.
659 TEST_CASE("SerializeDeserializeDetectionPostProcess")
660 {
661  const std::string layerName("detectionPostProcess");
662 
663  const std::vector<armnn::TensorInfo> inputInfos({
// [dropped lines 664-665: input TensorInfo initializers — see note above]
666  });
667 
668  const std::vector<armnn::TensorInfo> outputInfos({
// [dropped lines 669-672: output TensorInfo initializers — see note above]
673  });
674 
// [dropped line 675: descriptor declaration]
676  descriptor.m_UseRegularNms = true;
677  descriptor.m_MaxDetections = 3;
678  descriptor.m_MaxClassesPerDetection = 1;
679  descriptor.m_DetectionsPerClass =1;
680  descriptor.m_NmsScoreThreshold = 0.0;
681  descriptor.m_NmsIouThreshold = 0.5;
682  descriptor.m_NumClasses = 2;
683  descriptor.m_ScaleY = 10.0;
684  descriptor.m_ScaleX = 10.0;
685  descriptor.m_ScaleH = 5.0;
686  descriptor.m_ScaleW = 5.0;
687 
688  const armnn::TensorInfo anchorsInfo({ 6, 4 }, armnn::DataType::Float32, 0.0f, 0, true);
689  const std::vector<float> anchorsData({
690  0.5f, 0.5f, 1.0f, 1.0f,
691  0.5f, 0.5f, 1.0f, 1.0f,
692  0.5f, 0.5f, 1.0f, 1.0f,
693  0.5f, 10.5f, 1.0f, 1.0f,
694  0.5f, 10.5f, 1.0f, 1.0f,
695  0.5f, 100.5f, 1.0f, 1.0f
696  });
697  armnn::ConstTensor anchors(anchorsInfo, anchorsData);
698 
// [dropped line 699: network creation]
700  armnn::IConnectableLayer* const detectionLayer =
701  network->AddDetectionPostProcessLayer(descriptor, anchors, layerName.c_str());
702 
// Wire up the two inputs (box encodings, scores).
703  for (unsigned int i = 0; i < 2; i++)
704  {
705  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(static_cast<int>(i));
706  inputLayer->GetOutputSlot(0).Connect(detectionLayer->GetInputSlot(i));
707  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfos[i]);
708  }
709 
// Wire up the four outputs.
710  for (unsigned int i = 0; i < 4; i++)
711  {
712  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(static_cast<int>(i));
713  detectionLayer->GetOutputSlot(i).Connect(outputLayer->GetInputSlot(0));
714  detectionLayer->GetOutputSlot(i).SetTensorInfo(outputInfos[i]);
715  }
716 
717  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
718  CHECK(deserializedNetwork);
719 
720  const std::vector<armnn::ConstTensor>& constants {anchors};
// [dropped line 721: verifier declaration — see note above]
722  layerName, inputInfos, outputInfos, descriptor, constants);
723  deserializedNetwork->ExecuteStrategy(verifier);
724 }
725 
// Round-trip test for a Division layer: input0/input1 -> Division -> output,
// serialized and deserialized, then name/connections/tensor infos verified.
// NOTE(review): the INetwork::Create() line (original line 731) was dropped
// from this extract by the documentation generator.
726 TEST_CASE("SerializeDivision")
727 {
728  const std::string layerName("division");
729  const armnn::TensorInfo info({ 1, 5, 2, 3 }, armnn::DataType::Float32);
730 
732  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
733  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
734  armnn::IConnectableLayer* const divisionLayer = network->AddDivisionLayer(layerName.c_str());
735  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
736 
737  inputLayer0->GetOutputSlot(0).Connect(divisionLayer->GetInputSlot(0));
738  inputLayer1->GetOutputSlot(0).Connect(divisionLayer->GetInputSlot(1));
739  divisionLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
740 
// Both inputs and the output share the same tensor info.
741  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
742  inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
743  divisionLayer->GetOutputSlot(0).SetTensorInfo(info);
744 
745  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
746  CHECK(deserializedNetwork);
747 
748  LayerVerifierBase verifier(layerName, {info, info}, {info});
749  deserializedNetwork->ExecuteStrategy(verifier);
750 }
751 
// Round-trip test for a Comparison(Equal) layer: two Float32 inputs produce a
// Boolean output. The verifier checks name, connections and tensor infos.
// NOTE(review): the INetwork::Create() line (759) and the ComparisonDescriptor
// 'equalDescriptor' declaration (762) were dropped from this extract.
752 TEST_CASE("SerializeDeserializeComparisonEqual")
753 {
754  const std::string layerName("EqualLayer");
755  const armnn::TensorInfo inputTensorInfo1 = armnn::TensorInfo({2, 1, 2, 4}, armnn::DataType::Float32);
756  const armnn::TensorInfo inputTensorInfo2 = armnn::TensorInfo({2, 1, 2, 4}, armnn::DataType::Float32);
// Comparison layers always produce a Boolean tensor.
757  const armnn::TensorInfo outputTensorInfo = armnn::TensorInfo({2, 1, 2, 4}, armnn::DataType::Boolean);
758 
760  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(0);
761  armnn::IConnectableLayer* const inputLayer2 = network->AddInputLayer(1);
763  armnn::IConnectableLayer* const equalLayer = network->AddComparisonLayer(equalDescriptor, layerName.c_str());
764  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
765 
766  inputLayer1->GetOutputSlot(0).Connect(equalLayer->GetInputSlot(0));
767  inputLayer1->GetOutputSlot(0).SetTensorInfo(inputTensorInfo1);
768  inputLayer2->GetOutputSlot(0).Connect(equalLayer->GetInputSlot(1));
769  inputLayer2->GetOutputSlot(0).SetTensorInfo(inputTensorInfo2);
770  equalLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
771  equalLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
772 
773  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
774  CHECK(deserializedNetwork);
775 
776  LayerVerifierBase verifier(layerName, {inputTensorInfo1, inputTensorInfo2}, {outputTensorInfo});
777  deserializedNetwork->ExecuteStrategy(verifier);
778 }
779 
// Helper shared by the SerializeElementwiseUnary test: round-trips a network
// containing a single ElementwiseUnary layer for the given operation. The
// layer is named after the operation (GetUnaryOperationAsCString).
// NOTE(review): the inputInfo/outputInfo declarations (786-787), the
// INetwork::Create() line (791) and the verifier declaration line (807) were
// dropped from this extract by the documentation generator.
780 void SerializeElementwiseUnaryTest(armnn::UnaryOperation unaryOperation)
781 {
782  auto layerName = GetUnaryOperationAsCString(unaryOperation);
783 
784  const armnn::TensorShape shape{2, 1, 2, 2};
785 
788 
789  armnn::ElementwiseUnaryDescriptor descriptor(unaryOperation);
790 
792  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
793  armnn::IConnectableLayer* const elementwiseUnaryLayer =
794  network->AddElementwiseUnaryLayer(descriptor, layerName);
795  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
796 
797  inputLayer->GetOutputSlot(0).Connect(elementwiseUnaryLayer->GetInputSlot(0));
798  elementwiseUnaryLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
799 
800  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
801  elementwiseUnaryLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
802 
803  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
804 
805  CHECK(deserializedNetwork);
806 
808  verifier(layerName, { inputInfo }, { outputInfo }, descriptor);
809 
810  deserializedNetwork->ExecuteStrategy(verifier);
811 }
812 
813 TEST_CASE("SerializeElementwiseUnary")
814 {
815  using op = armnn::UnaryOperation;
816  std::initializer_list<op> allUnaryOperations = {op::Abs, op::Exp, op::Sqrt, op::Rsqrt, op::Neg,
817  op::LogicalNot, op::Log, op::Sin};
818 
819  for (auto unaryOperation : allUnaryOperations)
820  {
821  SerializeElementwiseUnaryTest(unaryOperation);
822  }
823 }
824 
// Round-trip test for a Fill layer: a Signed32 shape input produces a Float32
// output filled with the descriptor value (1.0f). Verified via the generic
// descriptor-aware verifier.
// NOTE(review): the INetwork::Create() line (833) was dropped from this
// extract by the documentation generator.
825 TEST_CASE("SerializeFill")
826 {
827  const std::string layerName("fill");
828  const armnn::TensorInfo inputInfo({4}, armnn::DataType::Signed32);
829  const armnn::TensorInfo outputInfo({1, 3, 3, 1}, armnn::DataType::Float32);
830 
831  armnn::FillDescriptor descriptor(1.0f);
832 
834  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
835  armnn::IConnectableLayer* const fillLayer = network->AddFillLayer(descriptor, layerName.c_str());
836  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
837 
838  inputLayer->GetOutputSlot(0).Connect(fillLayer->GetInputSlot(0));
839  fillLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
840 
841  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
842  fillLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
843 
844  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
845  CHECK(deserializedNetwork);
846 
847  LayerVerifierBaseWithDescriptor<armnn::FillDescriptor> verifier(layerName, {inputInfo}, {outputInfo}, descriptor);
848 
849  deserializedNetwork->ExecuteStrategy(verifier);
850 }
851 
// Round-trip test for a Floor layer: single input -> Floor -> output with the
// same tensor info on both sides.
// NOTE(review): the INetwork::Create() line (857) was dropped from this
// extract by the documentation generator.
852 TEST_CASE("SerializeFloor")
853 {
854  const std::string layerName("floor");
855  const armnn::TensorInfo info({4,4}, armnn::DataType::Float32);
856 
858  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
859  armnn::IConnectableLayer* const floorLayer = network->AddFloorLayer(layerName.c_str());
860  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
861 
862  inputLayer->GetOutputSlot(0).Connect(floorLayer->GetInputSlot(0));
863  floorLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
864 
865  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
866  floorLayer->GetOutputSlot(0).SetTensorInfo(info);
867 
868  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
869  CHECK(deserializedNetwork);
870 
871  LayerVerifierBase verifier(layerName, {info}, {info});
872  deserializedNetwork->ExecuteStrategy(verifier);
873 }
874 
876 class FullyConnectedLayerVerifier : public LayerVerifierBaseWithDescriptor<FullyConnectedDescriptor>
877 {
878 public:
879  FullyConnectedLayerVerifier(const std::string& layerName,
880  const std::vector<armnn::TensorInfo>& inputInfos,
881  const std::vector<armnn::TensorInfo>& outputInfos,
882  const FullyConnectedDescriptor& descriptor)
883  : LayerVerifierBaseWithDescriptor<FullyConnectedDescriptor>(layerName, inputInfos, outputInfos, descriptor) {}
884 
885  void ExecuteStrategy(const armnn::IConnectableLayer* layer,
886  const armnn::BaseDescriptor& descriptor,
887  const std::vector<armnn::ConstTensor>& constants,
888  const char* name,
889  const armnn::LayerBindingId id = 0) override
890  {
891  armnn::IgnoreUnused(constants, id);
892  switch (layer->GetType())
893  {
894  case armnn::LayerType::Input: break;
895  case armnn::LayerType::Output: break;
896  case armnn::LayerType::Constant: break;
897  default:
898  {
899  VerifyNameAndConnections(layer, name);
900  const FullyConnectedDescriptor& layerDescriptor =
901  static_cast<const FullyConnectedDescriptor&>(descriptor);
902  CHECK(layerDescriptor.m_ConstantWeights == m_Descriptor.m_ConstantWeights);
903  CHECK(layerDescriptor.m_BiasEnabled == m_Descriptor.m_BiasEnabled);
904  CHECK(layerDescriptor.m_TransposeWeightMatrix == m_Descriptor.m_TransposeWeightMatrix);
905  }
906  }
907  }
908 };
909 
// Round-trip test for a FullyConnected layer created via the deprecated
// weights/biases-as-arguments overload (wrapped in the deprecation-warning
// suppression macros in the original source).
// NOTE(review): the FullyConnectedDescriptor declaration (923), the
// INetwork::Create() line (928), the biases argument line (936) and the
// ARMNN_NO_DEPRECATE_WARN_* macro lines were dropped from this extract.
912 TEST_CASE("SerializeFullyConnected")
913 {
914  const std::string layerName("fullyConnected");
915  const armnn::TensorInfo inputInfo ({ 2, 5, 1, 1 }, armnn::DataType::Float32);
916  const armnn::TensorInfo outputInfo({ 2, 3 }, armnn::DataType::Float32);
917 
// Weights and biases are constant tensors (last ctor argument 'true').
918  const armnn::TensorInfo weightsInfo({ 5, 3 }, armnn::DataType::Float32, 0.0f, 0, true);
919  const armnn::TensorInfo biasesInfo ({ 3 }, armnn::DataType::Float32, 0.0f, 0, true);
920  std::vector<float> weightsData = GenerateRandomData<float>(weightsInfo.GetNumElements());
921  std::vector<float> biasesData = GenerateRandomData<float>(biasesInfo.GetNumElements());
922  armnn::ConstTensor weights(weightsInfo, weightsData);
923  armnn::ConstTensor biases(biasesInfo, biasesData);
924 
926  descriptor.m_BiasEnabled = true;
927  descriptor.m_TransposeWeightMatrix = false;
928  descriptor.m_ConstantWeights = true;
929 
931  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
932 
933  // Old way of handling constant tensors.
935  armnn::IConnectableLayer* const fullyConnectedLayer =
936  network->AddFullyConnectedLayer(descriptor,
937  weights,
939  layerName.c_str());
941 
942  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
943 
944  inputLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(0));
945  fullyConnectedLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
946 
947  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
948  fullyConnectedLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
949 
950  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
951  CHECK(deserializedNetwork);
952 
// Weights/biases infos are expected as extra inputs after deserialization.
953  FullyConnectedLayerVerifier verifier(layerName, {inputInfo, weightsInfo, biasesInfo}, {outputInfo}, descriptor);
954  deserializedNetwork->ExecuteStrategy(verifier);
955 }
954 
// Round-trip test for a FullyConnected layer with non-constant weights and
// biases supplied as network inputs (m_ConstantWeights = false); no constants
// are expected after deserialization.
// NOTE(review): the FullyConnectedDescriptor declaration (967), the
// INetwork::Create() line (972) and the verifier declaration line (995) were
// dropped from this extract by the documentation generator.
955 TEST_CASE("SerializeFullyConnectedWeightsAndBiasesAsInputs")
956 {
957  const std::string layerName("fullyConnected_weights_as_inputs");
958  const armnn::TensorInfo inputInfo ({ 2, 5, 1, 1 }, armnn::DataType::Float32);
959  const armnn::TensorInfo outputInfo({ 2, 3 }, armnn::DataType::Float32);
960 
961  const armnn::TensorInfo weightsInfo({ 5, 3 }, armnn::DataType::Float32);
962  const armnn::TensorInfo biasesInfo ({ 3 }, armnn::DataType::Float32);
963 
966 
968  descriptor.m_BiasEnabled = true;
969  descriptor.m_TransposeWeightMatrix = false;
970  descriptor.m_ConstantWeights = false;
971 
973  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
// Weights and biases enter the graph through dedicated input layers.
974  armnn::IConnectableLayer* const weightsInputLayer = network->AddInputLayer(1);
975  armnn::IConnectableLayer* const biasInputLayer = network->AddInputLayer(2);
976  armnn::IConnectableLayer* const fullyConnectedLayer =
977  network->AddFullyConnectedLayer(descriptor,
978  layerName.c_str());
979  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
980 
981  inputLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(0));
982  weightsInputLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(1));
983  biasInputLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(2));
984  fullyConnectedLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
985 
986  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
987  weightsInputLayer->GetOutputSlot(0).SetTensorInfo(weightsInfo);
988  biasInputLayer->GetOutputSlot(0).SetTensorInfo(biasesInfo);
989  fullyConnectedLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
990 
991  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
992  CHECK(deserializedNetwork);
993 
// Empty constants list: nothing should be baked into the layer.
994  const std::vector<armnn::ConstTensor> constants {};
996  layerName, {inputInfo, weightsInfo, biasesInfo}, {outputInfo}, descriptor, constants);
997  deserializedNetwork->ExecuteStrategy(verifier);
998 }
999 
// Round-trip test for a FullyConnected layer whose weights and biases come
// from separate Constant layers (the modern way of supplying constants).
// NOTE(review): the FullyConnectedDescriptor declaration (1014) and the
// INetwork::Create() line (1019) were dropped from this extract.
1000 TEST_CASE("SerializeFullyConnectedWeightsAndBiasesAsConstantLayers")
1001 {
1002  const std::string layerName("fullyConnected_weights_as_inputs");
1003  const armnn::TensorInfo inputInfo ({ 2, 5, 1, 1 }, armnn::DataType::Float32);
1004  const armnn::TensorInfo outputInfo({ 2, 3 }, armnn::DataType::Float32);
1005 
1006  const armnn::TensorInfo weightsInfo({ 5, 3 }, armnn::DataType::Float32, 0.0f, 0, true);
1007  const armnn::TensorInfo biasesInfo ({ 3 }, armnn::DataType::Float32, 0.0f, 0, true);
1008 
1009  std::vector<float> weightsData = GenerateRandomData<float>(weightsInfo.GetNumElements());
1010  std::vector<float> biasesData = GenerateRandomData<float>(biasesInfo.GetNumElements());
1011  armnn::ConstTensor weights(weightsInfo, weightsData);
1012  armnn::ConstTensor biases(biasesInfo, biasesData);
1013 
1015  descriptor.m_BiasEnabled = true;
1016  descriptor.m_TransposeWeightMatrix = false;
1017  descriptor.m_ConstantWeights = true;
1018 
1020  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
// Constants are modelled as dedicated Constant layers feeding slots 1 and 2.
1021  armnn::IConnectableLayer* const weightsLayer = network->AddConstantLayer(weights, "Weights");
1022  armnn::IConnectableLayer* const biasesLayer = network->AddConstantLayer(biases, "Biases");
1023  armnn::IConnectableLayer* const fullyConnectedLayer = network->AddFullyConnectedLayer(descriptor,layerName.c_str());
1024  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1025 
1026  inputLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(0));
1027  weightsLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(1));
1028  biasesLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(2));
1029  fullyConnectedLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1030 
1031  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
1032  weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsInfo);
1033  biasesLayer->GetOutputSlot(0).SetTensorInfo(biasesInfo);
1034  fullyConnectedLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1035 
1036  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1037  CHECK(deserializedNetwork);
1038 
1039  FullyConnectedLayerVerifier verifier(layerName, {inputInfo, weightsInfo, biasesInfo}, {outputInfo}, descriptor);
1040  deserializedNetwork->ExecuteStrategy(verifier);
1041 }
1042 
// Round-trip test for a Gather layer: quantized params input, constant
// Signed32 indices, quantized output. A local verifier checks m_Axis in
// addition to the standard name/connection checks.
// NOTE(review): a line preceding the local class (1045, presumably a using-
// declaration for GatherDescriptor) and the INetwork::Create() lines
// (1090-1091) were dropped from this extract.
1043 TEST_CASE("SerializeGather")
1044 {
// Local verifier: like the base one but also compares the Gather axis.
1046  class GatherLayerVerifier : public LayerVerifierBaseWithDescriptor<GatherDescriptor>
1047  {
1048  public:
1049  GatherLayerVerifier(const std::string& layerName,
1050  const std::vector<armnn::TensorInfo>& inputInfos,
1051  const std::vector<armnn::TensorInfo>& outputInfos,
1052  const GatherDescriptor& descriptor)
1053  : LayerVerifierBaseWithDescriptor<GatherDescriptor>(layerName, inputInfos, outputInfos, descriptor) {}
1054 
1055  void ExecuteStrategy(const armnn::IConnectableLayer* layer,
1056  const armnn::BaseDescriptor& descriptor,
1057  const std::vector<armnn::ConstTensor>& constants,
1058  const char* name,
1059  const armnn::LayerBindingId id = 0) override
1060  {
1061  armnn::IgnoreUnused(constants, id);
1062  switch (layer->GetType())
1063  {
// Boundary and constant layers need no descriptor verification.
1064  case armnn::LayerType::Input: break;
1065  case armnn::LayerType::Output: break;
1066  case armnn::LayerType::Constant: break;
1067  default:
1068  {
1069  VerifyNameAndConnections(layer, name);
1070  const GatherDescriptor& layerDescriptor = static_cast<const GatherDescriptor&>(descriptor);
1071  CHECK(layerDescriptor.m_Axis == m_Descriptor.m_Axis);
1072  }
1073  }
1074  }
1075  };
1076 
1077  const std::string layerName("gather");
1078  armnn::TensorInfo paramsInfo({ 8 }, armnn::DataType::QAsymmU8);
1079  armnn::TensorInfo outputInfo({ 3 }, armnn::DataType::QAsymmU8);
// Indices are a constant tensor ('true' constant flag).
1080  const armnn::TensorInfo indicesInfo({ 3 }, armnn::DataType::Signed32, 0.0f, 0, true);
1081  GatherDescriptor descriptor;
1082  descriptor.m_Axis = 1;
1083 
1084  paramsInfo.SetQuantizationScale(1.0f);
1085  paramsInfo.SetQuantizationOffset(0);
1086  outputInfo.SetQuantizationScale(1.0f);
1087  outputInfo.SetQuantizationOffset(0);
1088 
1089  const std::vector<int32_t>& indicesData = {7, 6, 5};
1090 
1092  armnn::IConnectableLayer *const inputLayer = network->AddInputLayer(0);
1093  armnn::IConnectableLayer *const constantLayer =
1094  network->AddConstantLayer(armnn::ConstTensor(indicesInfo, indicesData));
1095  armnn::IConnectableLayer *const gatherLayer = network->AddGatherLayer(descriptor, layerName.c_str());
1096  armnn::IConnectableLayer *const outputLayer = network->AddOutputLayer(0);
1097 
1098  inputLayer->GetOutputSlot(0).Connect(gatherLayer->GetInputSlot(0));
1099  constantLayer->GetOutputSlot(0).Connect(gatherLayer->GetInputSlot(1));
1100  gatherLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1101 
1102  inputLayer->GetOutputSlot(0).SetTensorInfo(paramsInfo);
1103  constantLayer->GetOutputSlot(0).SetTensorInfo(indicesInfo);
1104  gatherLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1105 
1106  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1107  CHECK(deserializedNetwork);
1108 
1109  GatherLayerVerifier verifier(layerName, {paramsInfo, indicesInfo}, {outputInfo}, descriptor);
1110  deserializedNetwork->ExecuteStrategy(verifier);
1111 }
1112 
1113 
// Round-trip test for a Comparison(Greater) layer.
// NOTE(review): the inputInfo/outputInfo declarations (1120-1121), the
// INetwork::Create() line (1123) and the ComparisonDescriptor
// 'greaterDescriptor' declaration (1126) were dropped from this extract.
1114 TEST_CASE("SerializeComparisonGreater")
1115 {
1116  const std::string layerName("greater");
1117 
1118  const armnn::TensorShape shape{2, 1, 2, 4};
1119 
1122 
1124  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
1125  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
1127  armnn::IConnectableLayer* const equalLayer = network->AddComparisonLayer(greaterDescriptor, layerName.c_str());
1128  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1129 
1130  inputLayer0->GetOutputSlot(0).Connect(equalLayer->GetInputSlot(0));
1131  inputLayer1->GetOutputSlot(0).Connect(equalLayer->GetInputSlot(1));
1132  equalLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1133 
1134  inputLayer0->GetOutputSlot(0).SetTensorInfo(inputInfo);
1135  inputLayer1->GetOutputSlot(0).SetTensorInfo(inputInfo);
1136  equalLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1137 
1138  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1139  CHECK(deserializedNetwork);
1140 
1141  LayerVerifierBase verifier(layerName, { inputInfo, inputInfo }, { outputInfo });
1142  deserializedNetwork->ExecuteStrategy(verifier);
1143 }
1144 
1145 
// Round-trip test for an InstanceNormalization layer with explicit gamma,
// beta, epsilon and NHWC data layout.
// NOTE(review): the InstanceNormalizationDescriptor declaration (1151), the
// INetwork::Create() line (1157) and the verifier declaration line (1172)
// were dropped from this extract by the documentation generator.
1146 TEST_CASE("SerializeInstanceNormalization")
1147 {
1148  const std::string layerName("instanceNormalization");
1149  const armnn::TensorInfo info({ 1, 2, 1, 5 }, armnn::DataType::Float32);
1150 
1152  descriptor.m_Gamma = 1.1f;
1153  descriptor.m_Beta = 0.1f;
1154  descriptor.m_Eps = 0.0001f;
1155  descriptor.m_DataLayout = armnn::DataLayout::NHWC;
1156 
1158  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1159  armnn::IConnectableLayer* const instanceNormLayer =
1160  network->AddInstanceNormalizationLayer(descriptor, layerName.c_str());
1161  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1162 
1163  inputLayer->GetOutputSlot(0).Connect(instanceNormLayer->GetInputSlot(0));
1164  instanceNormLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1165 
1166  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
1167  instanceNormLayer->GetOutputSlot(0).SetTensorInfo(info);
1168 
1169  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1170  CHECK(deserializedNetwork);
1171 
1173  layerName, {info}, {info}, descriptor);
1174  deserializedNetwork->ExecuteStrategy(verifier);
1175 }
1176 
// Round-trip test for an L2Normalization layer with an explicit epsilon.
// NOTE(review): the L2NormalizationDescriptor 'desc' declaration (with its
// data-layout setting), the INetwork::Create() line and the verifier
// declaration were dropped from this extract by the documentation generator.
1177 TEST_CASE("SerializeL2Normalization")
1178 {
1179  const std::string l2NormLayerName("l2Normalization");
1180  const armnn::TensorInfo info({1, 2, 1, 5}, armnn::DataType::Float32);
1181 
1184  desc.m_Eps = 0.0001f;
1185 
1187  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
1188  armnn::IConnectableLayer* const l2NormLayer = network->AddL2NormalizationLayer(desc, l2NormLayerName.c_str());
1189  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1190 
1191  inputLayer0->GetOutputSlot(0).Connect(l2NormLayer->GetInputSlot(0));
1192  l2NormLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1193 
1194  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
1195  l2NormLayer->GetOutputSlot(0).SetTensorInfo(info);
1196 
1197  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1198  CHECK(deserializedNetwork);
1199 
1201  l2NormLayerName, {info}, {info}, desc);
1202  deserializedNetwork->ExecuteStrategy(verifier);
1203 }
1204 
// Backward-compatibility test: deserializes a pre-built flatbuffer blob of an
// old-format L2Normalization model (no epsilon field) and checks that the
// default epsilon (1e-12f) is applied on load.
// NOTE(review): the L2NormalizationDescriptor 'desc' declaration (1252-1253)
// and the verifier declaration line (1257) were dropped from this extract.
1205 TEST_CASE("EnsureL2NormalizationBackwardCompatibility")
1206 {
1207  // The hex data below is a flat buffer containing a simple network with one input
1208  // a L2Normalization layer and an output layer with dimensions as per the tensor infos below.
1209  //
1210  // This test verifies that we can still read back these old style
1211  // models without the normalization epsilon value.
1212  const std::vector<uint8_t> l2NormalizationModel =
1213  {
1214  0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00,
1215  0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
1216  0x3C, 0x01, 0x00, 0x00, 0x74, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
1217  0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0xE8, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B,
1218  0x04, 0x00, 0x00, 0x00, 0xD6, 0xFE, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x08, 0x00,
1219  0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x9E, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00,
1220  0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00,
1221  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1222  0x4C, 0xFF, 0xFF, 0xFF, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x44, 0xFF, 0xFF, 0xFF, 0x00, 0x00,
1223  0x00, 0x20, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00,
1224  0x20, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x06, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00,
1225  0x0E, 0x00, 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x14, 0x00, 0x0E, 0x00, 0x00, 0x00,
1226  0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x1F, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x20, 0x00,
1227  0x00, 0x00, 0x0F, 0x00, 0x00, 0x00, 0x6C, 0x32, 0x4E, 0x6F, 0x72, 0x6D, 0x61, 0x6C, 0x69, 0x7A, 0x61, 0x74,
1228  0x69, 0x6F, 0x6E, 0x00, 0x01, 0x00, 0x00, 0x00, 0x48, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00,
1229  0x00, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
1230  0x52, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00,
1231  0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00,
1232  0x08, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1233  0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09,
1234  0x04, 0x00, 0x00, 0x00, 0xF6, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x0A, 0x00,
1235  0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x14, 0x00, 0x00, 0x00,
1236  0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00,
1237  0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1238  0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0A, 0x00, 0x00, 0x00,
1239  0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x08, 0x00,
1240  0x07, 0x00, 0x0C, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
1241  0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
1242  0x05, 0x00, 0x00, 0x00, 0x00
1243  };
1244 
1245  armnn::INetworkPtr deserializedNetwork =
1246  DeserializeNetwork(std::string(l2NormalizationModel.begin(), l2NormalizationModel.end()));
1247  CHECK(deserializedNetwork);
1248 
1249  const std::string layerName("l2Normalization");
1250  const armnn::TensorInfo inputInfo = armnn::TensorInfo({1, 2, 1, 5}, armnn::DataType::Float32);
1251 
1254  // Since this variable does not exist in the l2NormalizationModel dump, the default value will be loaded
1255  desc.m_Eps = 1e-12f;
1256 
1258  layerName, {inputInfo}, {inputInfo}, desc);
1259  deserializedNetwork->ExecuteStrategy(verifier);
1260 }
1261 
// Round-trip test for a LogicalBinary (LogicalAnd) layer on Boolean tensors.
// NOTE(review): the inputInfo/outputInfo declarations (1268-1269), the
// LogicalBinaryDescriptor declaration (1271), the INetwork::Create() line
// (1273) and the verifier declaration (1290) were dropped from this extract.
1262 TEST_CASE("SerializeLogicalBinary")
1263 {
1264  const std::string layerName("logicalBinaryAnd");
1265 
1266  const armnn::TensorShape shape{2, 1, 2, 2};
1267 
1270 
1272 
1274  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
1275  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
1276  armnn::IConnectableLayer* const logicalBinaryLayer = network->AddLogicalBinaryLayer(descriptor, layerName.c_str());
1277  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1278 
1279  inputLayer0->GetOutputSlot(0).Connect(logicalBinaryLayer->GetInputSlot(0));
1280  inputLayer1->GetOutputSlot(0).Connect(logicalBinaryLayer->GetInputSlot(1));
1281  logicalBinaryLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1282 
1283  inputLayer0->GetOutputSlot(0).SetTensorInfo(inputInfo);
1284  inputLayer1->GetOutputSlot(0).SetTensorInfo(inputInfo);
1285  logicalBinaryLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1286 
1287  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1288  CHECK(deserializedNetwork);
1289 
1291  layerName, { inputInfo, inputInfo }, { outputInfo }, descriptor);
1292  deserializedNetwork->ExecuteStrategy(verifier);
1293 }
1294 
// Round-trip test for a LogSoftmax layer (beta = 1, axis = -1, i.e. the last
// dimension).
// NOTE(review): the INetwork::Create() line (1304) was dropped from this
// extract by the documentation generator.
1295 TEST_CASE("SerializeLogSoftmax")
1296 {
1297  const std::string layerName("log_softmax");
1298  const armnn::TensorInfo info({1, 10}, armnn::DataType::Float32);
1299 
1300  armnn::LogSoftmaxDescriptor descriptor;
1301  descriptor.m_Beta = 1.0f;
1302  descriptor.m_Axis = -1;
1303 
1305  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1306  armnn::IConnectableLayer* const logSoftmaxLayer = network->AddLogSoftmaxLayer(descriptor, layerName.c_str());
1307  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1308 
1309  inputLayer->GetOutputSlot(0).Connect(logSoftmaxLayer->GetInputSlot(0));
1310  logSoftmaxLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1311 
1312  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
1313  logSoftmaxLayer->GetOutputSlot(0).SetTensorInfo(info);
1314 
1315  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1316  CHECK(deserializedNetwork);
1317 
1318  LayerVerifierBaseWithDescriptor<armnn::LogSoftmaxDescriptor> verifier(layerName, {info}, {info}, descriptor);
1319  deserializedNetwork->ExecuteStrategy(verifier);
1320 }
1321 
// Round-trip test for a Maximum layer with two identical-shaped inputs.
// NOTE(review): the INetwork::Create() line (1327) was dropped from this
// extract by the documentation generator.
1322 TEST_CASE("SerializeMaximum")
1323 {
1324  const std::string layerName("maximum");
1325  const armnn::TensorInfo info({ 1, 2, 2, 3 }, armnn::DataType::Float32);
1326 
1328  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
1329  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
1330  armnn::IConnectableLayer* const maximumLayer = network->AddMaximumLayer(layerName.c_str());
1331  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1332 
1333  inputLayer0->GetOutputSlot(0).Connect(maximumLayer->GetInputSlot(0));
1334  inputLayer1->GetOutputSlot(0).Connect(maximumLayer->GetInputSlot(1));
1335  maximumLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1336 
1337  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
1338  inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
1339  maximumLayer->GetOutputSlot(0).SetTensorInfo(info);
1340 
1341  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1342  CHECK(deserializedNetwork);
1343 
1344  LayerVerifierBase verifier(layerName, {info, info}, {info});
1345  deserializedNetwork->ExecuteStrategy(verifier);
1346 }
1347 
// Round-trip test for a Mean layer reducing over axis 2 with kept dimensions
// (input {1,1,3,2} -> output {1,1,1,2}).
// NOTE(review): the INetwork::Create() line (1358) was dropped from this
// extract by the documentation generator.
1348 TEST_CASE("SerializeMean")
1349 {
1350  const std::string layerName("mean");
1351  const armnn::TensorInfo inputInfo({1, 1, 3, 2}, armnn::DataType::Float32);
1352  const armnn::TensorInfo outputInfo({1, 1, 1, 2}, armnn::DataType::Float32);
1353 
1354  armnn::MeanDescriptor descriptor;
1355  descriptor.m_Axis = { 2 };
1356  descriptor.m_KeepDims = true;
1357 
1359  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1360  armnn::IConnectableLayer* const meanLayer = network->AddMeanLayer(descriptor, layerName.c_str());
1361  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1362 
1363  inputLayer->GetOutputSlot(0).Connect(meanLayer->GetInputSlot(0));
1364  meanLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1365 
1366  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
1367  meanLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1368 
1369  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1370  CHECK(deserializedNetwork);
1371 
1372  LayerVerifierBaseWithDescriptor<armnn::MeanDescriptor> verifier(layerName, {inputInfo}, {outputInfo}, descriptor);
1373  deserializedNetwork->ExecuteStrategy(verifier);
1374 }
1375 
// Round-trip test for a Merge layer with two identical-shaped inputs.
// NOTE(review): the INetwork::Create() line (1381) was dropped from this
// extract by the documentation generator.
1376 TEST_CASE("SerializeMerge")
1377 {
1378  const std::string layerName("merge");
1379  const armnn::TensorInfo info({ 1, 2, 2, 3 }, armnn::DataType::Float32);
1380 
1382  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
1383  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
1384  armnn::IConnectableLayer* const mergeLayer = network->AddMergeLayer(layerName.c_str());
1385  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1386 
1387  inputLayer0->GetOutputSlot(0).Connect(mergeLayer->GetInputSlot(0));
1388  inputLayer1->GetOutputSlot(0).Connect(mergeLayer->GetInputSlot(1));
1389  mergeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1390 
1391  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
1392  inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
1393  mergeLayer->GetOutputSlot(0).SetTensorInfo(info);
1394 
1395  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1396  CHECK(deserializedNetwork);
1397 
1398  LayerVerifierBase verifier(layerName, {info, info}, {info});
1399  deserializedNetwork->ExecuteStrategy(verifier);
1400 }
1401 
// Verifier used by the Merger backward-compatibility test: deserialized
// old-style Merger layers must come back as Concat layers. Encountering a
// layer of the deprecated type throws; the replacement layer has its name,
// connections and OriginsDescriptor verified.
// NOTE(review): the two case labels at original lines 1422 and 1427 (the
// deprecated layer type and its Concat replacement, hyperlinked by the doc
// generator) were dropped from this extract — confirm against the source.
1402 class MergerLayerVerifier : public LayerVerifierBaseWithDescriptor<armnn::OriginsDescriptor>
1403 {
1404 public:
1405  MergerLayerVerifier(const std::string& layerName,
1406  const std::vector<armnn::TensorInfo>& inputInfos,
1407  const std::vector<armnn::TensorInfo>& outputInfos,
1408  const armnn::OriginsDescriptor& descriptor)
1409  : LayerVerifierBaseWithDescriptor<armnn::OriginsDescriptor>(layerName, inputInfos, outputInfos, descriptor) {}
1410 
1411  void ExecuteStrategy(const armnn::IConnectableLayer* layer,
1412  const armnn::BaseDescriptor& descriptor,
1413  const std::vector<armnn::ConstTensor>& constants,
1414  const char* name,
1415  const armnn::LayerBindingId id = 0) override
1416  {
1417  armnn::IgnoreUnused(descriptor, constants, id);
1418  switch (layer->GetType())
1419  {
1420  case armnn::LayerType::Input: break;
1421  case armnn::LayerType::Output: break;
1423  {
// A surviving deprecated layer means the translation did not happen.
1424  throw armnn::Exception("MergerLayer should have translated to ConcatLayer");
1425  break;
1426  }
1428  {
1429  VerifyNameAndConnections(layer, name);
1430  const armnn::MergerDescriptor& layerDescriptor =
1431  static_cast<const armnn::MergerDescriptor&>(descriptor);
1432  VerifyDescriptor(layerDescriptor);
1433  break;
1434  }
1435  default:
1436  {
1437  throw armnn::Exception("Unexpected layer type in Merge test model");
1438  }
1439  }
1440  }
1441 };
1442 
1443 TEST_CASE("EnsureMergerLayerBackwardCompatibility")
1444 {
1445  // The hex data below is a flat buffer containing a simple network with two inputs
1446  // a merger layer (now deprecated) and an output layer with dimensions as per the tensor infos below.
1447  //
1448  // This test verifies that we can still read back these old style
1449  // models replacing the MergerLayers with ConcatLayers with the same parameters.
1450  const std::vector<uint8_t> mergerModel =
1451  {
1452  0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00,
1453  0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
1454  0x38, 0x02, 0x00, 0x00, 0x8C, 0x01, 0x00, 0x00, 0x70, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x02, 0x00,
1455  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
1456  0xF4, 0xFD, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B, 0x04, 0x00, 0x00, 0x00, 0x92, 0xFE, 0xFF, 0xFF, 0x04, 0x00,
1457  0x00, 0x00, 0x9A, 0xFE, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x7E, 0xFE, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00,
1458  0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00,
1459  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1460  0xF8, 0xFE, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x48, 0xFE, 0xFF, 0xFF, 0x00, 0x00,
1461  0x00, 0x1F, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00,
1462  0x68, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00,
1463  0x0C, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
1464  0x02, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x22, 0xFF, 0xFF, 0xFF, 0x04, 0x00,
1465  0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1466  0x00, 0x00, 0x00, 0x00, 0x3E, 0xFF, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00,
1467  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x36, 0xFF, 0xFF, 0xFF,
1468  0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x1E, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x1C, 0x00,
1469  0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x6D, 0x65, 0x72, 0x67, 0x65, 0x72, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
1470  0x5C, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x34, 0xFF,
1471  0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x92, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00,
1472  0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x02, 0x00,
1473  0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x08, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00,
1474  0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x00, 0x00,
1475  0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0E, 0x00,
1476  0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00,
1477  0x06, 0x00, 0x08, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0E, 0x00,
1478  0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
1479  0x0E, 0x00, 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x14, 0x00, 0x0E, 0x00, 0x00, 0x00,
1480  0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00,
1481  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
1482  0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00,
1483  0x00, 0x00, 0x66, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1484  0x04, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00,
1485  0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09,
1486  0x04, 0x00, 0x00, 0x00, 0xF6, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x0A, 0x00,
1487  0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x14, 0x00, 0x00, 0x00,
1488  0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00,
1489  0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1490  0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0A, 0x00, 0x00, 0x00,
1491  0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x08, 0x00,
1492  0x07, 0x00, 0x0C, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
1493  0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
1494  0x02, 0x00, 0x00, 0x00
1495  };
1496 
1497  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(std::string(mergerModel.begin(), mergerModel.end()));
1498  CHECK(deserializedNetwork);
1499 
1500  const armnn::TensorInfo inputInfo = armnn::TensorInfo({ 2, 3, 2, 2 }, armnn::DataType::Float32);
1501  const armnn::TensorInfo outputInfo = armnn::TensorInfo({ 4, 3, 2, 2 }, armnn::DataType::Float32);
1502 
1503  const std::vector<armnn::TensorShape> shapes({inputInfo.GetShape(), inputInfo.GetShape()});
1504 
1505  armnn::OriginsDescriptor descriptor =
1506  armnn::CreateDescriptorForConcatenation(shapes.begin(), shapes.end(), 0);
1507 
1508  MergerLayerVerifier verifier("merger", { inputInfo, inputInfo }, { outputInfo }, descriptor);
1509  deserializedNetwork->ExecuteStrategy(verifier);
1510 }
1511 
1512 TEST_CASE("SerializeConcat")
1513 {
1514  const std::string layerName("concat");
1515  const armnn::TensorInfo inputInfo = armnn::TensorInfo({2, 3, 2, 2}, armnn::DataType::Float32);
1516  const armnn::TensorInfo outputInfo = armnn::TensorInfo({4, 3, 2, 2}, armnn::DataType::Float32);
1517 
1518  const std::vector<armnn::TensorShape> shapes({inputInfo.GetShape(), inputInfo.GetShape()});
1519 
1520  armnn::OriginsDescriptor descriptor =
1521  armnn::CreateDescriptorForConcatenation(shapes.begin(), shapes.end(), 0);
1522 
1524  armnn::IConnectableLayer* const inputLayerOne = network->AddInputLayer(0);
1525  armnn::IConnectableLayer* const inputLayerTwo = network->AddInputLayer(1);
1526  armnn::IConnectableLayer* const concatLayer = network->AddConcatLayer(descriptor, layerName.c_str());
1527  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1528 
1529  inputLayerOne->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(0));
1530  inputLayerTwo->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(1));
1531  concatLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1532 
1533  inputLayerOne->GetOutputSlot(0).SetTensorInfo(inputInfo);
1534  inputLayerTwo->GetOutputSlot(0).SetTensorInfo(inputInfo);
1535  concatLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1536 
1537  std::string concatLayerNetwork = SerializeNetwork(*network);
1538  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(concatLayerNetwork);
1539  CHECK(deserializedNetwork);
1540 
1541  // NOTE: using the MergerLayerVerifier to ensure that it is a concat layer and not a
1542  // merger layer that gets placed into the graph.
1543  MergerLayerVerifier verifier(layerName, {inputInfo, inputInfo}, {outputInfo}, descriptor);
1544  deserializedNetwork->ExecuteStrategy(verifier);
1545 }
1546 
1547 TEST_CASE("SerializeMinimum")
1548 {
1549  const std::string layerName("minimum");
1550  const armnn::TensorInfo info({ 1, 2, 2, 3 }, armnn::DataType::Float32);
1551 
1553  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
1554  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
1555  armnn::IConnectableLayer* const minimumLayer = network->AddMinimumLayer(layerName.c_str());
1556  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1557 
1558  inputLayer0->GetOutputSlot(0).Connect(minimumLayer->GetInputSlot(0));
1559  inputLayer1->GetOutputSlot(0).Connect(minimumLayer->GetInputSlot(1));
1560  minimumLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1561 
1562  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
1563  inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
1564  minimumLayer->GetOutputSlot(0).SetTensorInfo(info);
1565 
1566  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1567  CHECK(deserializedNetwork);
1568 
1569  LayerVerifierBase verifier(layerName, {info, info}, {info});
1570  deserializedNetwork->ExecuteStrategy(verifier);
1571 }
1572 
1573 TEST_CASE("SerializeMultiplication")
1574 {
1575  const std::string layerName("multiplication");
1576  const armnn::TensorInfo info({ 1, 5, 2, 3 }, armnn::DataType::Float32);
1577 
1579  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
1580  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
1581  armnn::IConnectableLayer* const multiplicationLayer = network->AddMultiplicationLayer(layerName.c_str());
1582  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1583 
1584  inputLayer0->GetOutputSlot(0).Connect(multiplicationLayer->GetInputSlot(0));
1585  inputLayer1->GetOutputSlot(0).Connect(multiplicationLayer->GetInputSlot(1));
1586  multiplicationLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1587 
1588  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
1589  inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
1590  multiplicationLayer->GetOutputSlot(0).SetTensorInfo(info);
1591 
1592  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1593  CHECK(deserializedNetwork);
1594 
1595  LayerVerifierBase verifier(layerName, {info, info}, {info});
1596  deserializedNetwork->ExecuteStrategy(verifier);
1597 }
1598 
1599 TEST_CASE("SerializePrelu")
1600 {
1601  const std::string layerName("prelu");
1602 
1603  armnn::TensorInfo inputTensorInfo ({ 4, 1, 2 }, armnn::DataType::Float32);
1604  armnn::TensorInfo alphaTensorInfo ({ 5, 4, 3, 1 }, armnn::DataType::Float32);
1605  armnn::TensorInfo outputTensorInfo({ 5, 4, 3, 2 }, armnn::DataType::Float32);
1606 
1608  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1609  armnn::IConnectableLayer* const alphaLayer = network->AddInputLayer(1);
1610  armnn::IConnectableLayer* const preluLayer = network->AddPreluLayer(layerName.c_str());
1611  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1612 
1613  inputLayer->GetOutputSlot(0).Connect(preluLayer->GetInputSlot(0));
1614  alphaLayer->GetOutputSlot(0).Connect(preluLayer->GetInputSlot(1));
1615  preluLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1616 
1617  inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
1618  alphaLayer->GetOutputSlot(0).SetTensorInfo(alphaTensorInfo);
1619  preluLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1620 
1621  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1622  CHECK(deserializedNetwork);
1623 
1624  LayerVerifierBase verifier(layerName, {inputTensorInfo, alphaTensorInfo}, {outputTensorInfo});
1625  deserializedNetwork->ExecuteStrategy(verifier);
1626 }
1627 
1628 TEST_CASE("SerializeNormalization")
1629 {
1630  const std::string layerName("normalization");
1631  const armnn::TensorInfo info({2, 1, 2, 2}, armnn::DataType::Float32);
1632 
1635  desc.m_NormSize = 3;
1636  desc.m_Alpha = 1;
1637  desc.m_Beta = 1;
1638  desc.m_K = 1;
1639 
1641  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1642  armnn::IConnectableLayer* const normalizationLayer = network->AddNormalizationLayer(desc, layerName.c_str());
1643  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1644 
1645  inputLayer->GetOutputSlot(0).Connect(normalizationLayer->GetInputSlot(0));
1646  normalizationLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1647 
1648  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
1649  normalizationLayer->GetOutputSlot(0).SetTensorInfo(info);
1650 
1651  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1652  CHECK(deserializedNetwork);
1653 
1654  LayerVerifierBaseWithDescriptor<armnn::NormalizationDescriptor> verifier(layerName, {info}, {info}, desc);
1655  deserializedNetwork->ExecuteStrategy(verifier);
1656 }
1657 
1658 TEST_CASE("SerializePad")
1659 {
1660  const std::string layerName("pad");
1661  const armnn::TensorInfo inputTensorInfo = armnn::TensorInfo({1, 2, 3, 4}, armnn::DataType::Float32);
1662  const armnn::TensorInfo outputTensorInfo = armnn::TensorInfo({1, 3, 5, 7}, armnn::DataType::Float32);
1663 
1664  armnn::PadDescriptor desc({{0, 0}, {1, 0}, {1, 1}, {1, 2}});
1665 
1667  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1668  armnn::IConnectableLayer* const padLayer = network->AddPadLayer(desc, layerName.c_str());
1669  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1670 
1671  inputLayer->GetOutputSlot(0).Connect(padLayer->GetInputSlot(0));
1672  padLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1673 
1674  inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
1675  padLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1676 
1677  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1678  CHECK(deserializedNetwork);
1679 
1681  {inputTensorInfo},
1682  {outputTensorInfo},
1683  desc);
1684  deserializedNetwork->ExecuteStrategy(verifier);
1685 }
1686 
1687 TEST_CASE("SerializePadReflect")
1688 {
1689  const std::string layerName("padReflect");
1690  const armnn::TensorInfo inputTensorInfo = armnn::TensorInfo({1, 2, 3, 4}, armnn::DataType::Float32);
1691  const armnn::TensorInfo outputTensorInfo = armnn::TensorInfo({1, 3, 5, 7}, armnn::DataType::Float32);
1692 
1693  armnn::PadDescriptor desc({{0, 0}, {1, 0}, {1, 1}, {1, 2}});
1695 
1697  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1698  armnn::IConnectableLayer* const padLayer = network->AddPadLayer(desc, layerName.c_str());
1699  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1700 
1701  inputLayer->GetOutputSlot(0).Connect(padLayer->GetInputSlot(0));
1702  padLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1703 
1704  inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
1705  padLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1706 
1707  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1708  CHECK(deserializedNetwork);
1709 
1711  {inputTensorInfo},
1712  {outputTensorInfo},
1713  desc);
1714  deserializedNetwork->ExecuteStrategy(verifier);
1715 }
1716 
1717 TEST_CASE("EnsurePadBackwardCompatibility")
1718 {
1719  // The PadDescriptor is being extended with a float PadValue (so a value other than 0
1720  // can be used to pad the tensor.
1721  //
1722  // This test contains a binary representation of a simple input->pad->output network
1723  // prior to this change to test that the descriptor has been updated in a backward
1724  // compatible way with respect to Deserialization of older binary dumps
1725  const std::vector<uint8_t> padModel =
1726  {
1727  0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00,
1728  0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
1729  0x54, 0x01, 0x00, 0x00, 0x6C, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
1730  0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0xD0, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B,
1731  0x04, 0x00, 0x00, 0x00, 0x96, 0xFF, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x9E, 0xFF, 0xFF, 0xFF, 0x04, 0x00,
1732  0x00, 0x00, 0x72, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
1733  0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00,
1734  0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x2C, 0xFF, 0xFF, 0xFF, 0x01, 0x00, 0x00, 0x00,
1735  0x00, 0x00, 0x00, 0x00, 0x24, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x16, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00,
1736  0x0E, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x4C, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00,
1737  0x00, 0x00, 0x06, 0x00, 0x08, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x08, 0x00,
1738  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1739  0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00,
1740  0x0E, 0x00, 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x14, 0x00, 0x0E, 0x00, 0x00, 0x00,
1741  0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00,
1742  0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x70, 0x61, 0x64, 0x00, 0x01, 0x00, 0x00, 0x00, 0x48, 0x00, 0x00, 0x00,
1743  0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00,
1744  0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x52, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00,
1745  0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x05, 0x00,
1746  0x00, 0x00, 0x07, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00,
1747  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00,
1748  0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x04, 0x00, 0x00, 0x00, 0xF6, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00,
1749  0x00, 0x00, 0x06, 0x00, 0x0A, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00,
1750  0x0E, 0x00, 0x14, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x0E, 0x00, 0x00, 0x00,
1751  0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00,
1752  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00,
1753  0x08, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00,
1754  0x0A, 0x00, 0x10, 0x00, 0x08, 0x00, 0x07, 0x00, 0x0C, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
1755  0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00,
1756  0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00
1757  };
1758 
1759  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(std::string(padModel.begin(), padModel.end()));
1760  CHECK(deserializedNetwork);
1761 
1762  const armnn::TensorInfo inputInfo = armnn::TensorInfo({ 1, 2, 3, 4 }, armnn::DataType::Float32);
1763  const armnn::TensorInfo outputInfo = armnn::TensorInfo({ 1, 3, 5, 7 }, armnn::DataType::Float32);
1764 
1765  armnn::PadDescriptor descriptor({{ 0, 0 }, { 1, 0 }, { 1, 1 }, { 1, 2 }});
1766 
1767  LayerVerifierBaseWithDescriptor<armnn::PadDescriptor> verifier("pad", { inputInfo }, { outputInfo }, descriptor);
1768  deserializedNetwork->ExecuteStrategy(verifier);
1769 }
1770 
1771 TEST_CASE("SerializePermute")
1772 {
1773  const std::string layerName("permute");
1774  const armnn::TensorInfo inputTensorInfo({4, 3, 2, 1}, armnn::DataType::Float32);
1775  const armnn::TensorInfo outputTensorInfo({1, 2, 3, 4}, armnn::DataType::Float32);
1776 
1777  armnn::PermuteDescriptor descriptor(armnn::PermutationVector({3, 2, 1, 0}));
1778 
1780  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1781  armnn::IConnectableLayer* const permuteLayer = network->AddPermuteLayer(descriptor, layerName.c_str());
1782  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1783 
1784  inputLayer->GetOutputSlot(0).Connect(permuteLayer->GetInputSlot(0));
1785  permuteLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1786 
1787  inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
1788  permuteLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1789 
1790  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1791  CHECK(deserializedNetwork);
1792 
1794  layerName, {inputTensorInfo}, {outputTensorInfo}, descriptor);
1795  deserializedNetwork->ExecuteStrategy(verifier);
1796 }
1797 
1798 TEST_CASE("SerializePooling2d")
1799 {
1800  const std::string layerName("pooling2d");
1801  const armnn::TensorInfo inputInfo({1, 2, 2, 1}, armnn::DataType::Float32);
1802  const armnn::TensorInfo outputInfo({1, 1, 1, 1}, armnn::DataType::Float32);
1803 
1806  desc.m_PadTop = 0;
1807  desc.m_PadBottom = 0;
1808  desc.m_PadLeft = 0;
1809  desc.m_PadRight = 0;
1810  desc.m_PoolType = armnn::PoolingAlgorithm::Average;
1811  desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
1812  desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
1813  desc.m_PoolHeight = 2;
1814  desc.m_PoolWidth = 2;
1815  desc.m_StrideX = 2;
1816  desc.m_StrideY = 2;
1817 
1819  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1820  armnn::IConnectableLayer* const pooling2dLayer = network->AddPooling2dLayer(desc, layerName.c_str());
1821  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1822 
1823  inputLayer->GetOutputSlot(0).Connect(pooling2dLayer->GetInputSlot(0));
1824  pooling2dLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1825 
1826  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
1827  pooling2dLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1828 
1829  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1830  CHECK(deserializedNetwork);
1831 
1833  layerName, {inputInfo}, {outputInfo}, desc);
1834  deserializedNetwork->ExecuteStrategy(verifier);
1835 }
1836 
1837 TEST_CASE("SerializePooling3d")
1838 {
1839  const std::string layerName("pooling3d");
1840  const armnn::TensorInfo inputInfo({1, 1, 2, 2, 2}, armnn::DataType::Float32);
1841  const armnn::TensorInfo outputInfo({1, 1, 1, 1, 1}, armnn::DataType::Float32);
1842 
1845  desc.m_PadFront = 0;
1846  desc.m_PadBack = 0;
1847  desc.m_PadTop = 0;
1848  desc.m_PadBottom = 0;
1849  desc.m_PadLeft = 0;
1850  desc.m_PadRight = 0;
1851  desc.m_PoolType = armnn::PoolingAlgorithm::Average;
1852  desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
1853  desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
1854  desc.m_PoolHeight = 2;
1855  desc.m_PoolWidth = 2;
1856  desc.m_PoolDepth = 2;
1857  desc.m_StrideX = 2;
1858  desc.m_StrideY = 2;
1859  desc.m_StrideZ = 2;
1860 
1862  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1863  armnn::IConnectableLayer* const pooling3dLayer = network->AddPooling3dLayer(desc, layerName.c_str());
1864  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1865 
1866  inputLayer->GetOutputSlot(0).Connect(pooling3dLayer->GetInputSlot(0));
1867  pooling3dLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1868 
1869  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
1870  pooling3dLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1871 
1872  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1873  CHECK(deserializedNetwork);
1874 
1876  layerName, {inputInfo}, {outputInfo}, desc);
1877  deserializedNetwork->ExecuteStrategy(verifier);
1878 }
1879 
1880 TEST_CASE("SerializeQuantize")
1881 {
1882  const std::string layerName("quantize");
1883  const armnn::TensorInfo info({ 1, 2, 2, 3 }, armnn::DataType::Float32);
1884 
1886  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1887  armnn::IConnectableLayer* const quantizeLayer = network->AddQuantizeLayer(layerName.c_str());
1888  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1889 
1890  inputLayer->GetOutputSlot(0).Connect(quantizeLayer->GetInputSlot(0));
1891  quantizeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1892 
1893  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
1894  quantizeLayer->GetOutputSlot(0).SetTensorInfo(info);
1895 
1896  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1897  CHECK(deserializedNetwork);
1898 
1899  LayerVerifierBase verifier(layerName, {info}, {info});
1900  deserializedNetwork->ExecuteStrategy(verifier);
1901 }
1902 
1903 TEST_CASE("SerializeRank")
1904 {
1905  const std::string layerName("rank");
1906  const armnn::TensorInfo inputInfo({1, 9}, armnn::DataType::Float32);
1907  const armnn::TensorInfo outputInfo({1}, armnn::DataType::Signed32);
1908 
1910  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1911  armnn::IConnectableLayer* const rankLayer = network->AddRankLayer(layerName.c_str());
1912  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1913 
1914  inputLayer->GetOutputSlot(0).Connect(rankLayer->GetInputSlot(0));
1915  rankLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1916 
1917  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
1918  rankLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1919 
1920  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1921  CHECK(deserializedNetwork);
1922 
1923  LayerVerifierBase verifier(layerName, {inputInfo}, {outputInfo});
1924  deserializedNetwork->ExecuteStrategy(verifier);
1925 }
1926 
1927 TEST_CASE("SerializeReduceSum")
1928 {
1929  const std::string layerName("Reduce_Sum");
1930  const armnn::TensorInfo inputInfo({1, 1, 3, 2}, armnn::DataType::Float32);
1931  const armnn::TensorInfo outputInfo({1, 1, 1, 2}, armnn::DataType::Float32);
1932 
1933  armnn::ReduceDescriptor descriptor;
1934  descriptor.m_vAxis = { 2 };
1935  descriptor.m_ReduceOperation = armnn::ReduceOperation::Sum;
1936 
1938  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1939  armnn::IConnectableLayer* const reduceSumLayer = network->AddReduceLayer(descriptor, layerName.c_str());
1940  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1941 
1942  inputLayer->GetOutputSlot(0).Connect(reduceSumLayer->GetInputSlot(0));
1943  reduceSumLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1944 
1945  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
1946  reduceSumLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1947 
1948  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1949  CHECK(deserializedNetwork);
1950 
1951  LayerVerifierBaseWithDescriptor<armnn::ReduceDescriptor> verifier(layerName, {inputInfo}, {outputInfo}, descriptor);
1952  deserializedNetwork->ExecuteStrategy(verifier);
1953 }
1954 
1955 TEST_CASE("SerializeReshape")
1956 {
1957  const std::string layerName("reshape");
1958  const armnn::TensorInfo inputInfo({1, 9}, armnn::DataType::Float32);
1959  const armnn::TensorInfo outputInfo({3, 3}, armnn::DataType::Float32);
1960 
1961  armnn::ReshapeDescriptor descriptor({3, 3});
1962 
1964  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1965  armnn::IConnectableLayer* const reshapeLayer = network->AddReshapeLayer(descriptor, layerName.c_str());
1966  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1967 
1968  inputLayer->GetOutputSlot(0).Connect(reshapeLayer->GetInputSlot(0));
1969  reshapeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1970 
1971  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
1972  reshapeLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1973 
1974  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1975  CHECK(deserializedNetwork);
1976 
1978  layerName, {inputInfo}, {outputInfo}, descriptor);
1979  deserializedNetwork->ExecuteStrategy(verifier);
1980 }
1981 
1982 TEST_CASE("SerializeResize")
1983 {
1984  const std::string layerName("resize");
1985  const armnn::TensorInfo inputInfo = armnn::TensorInfo({1, 3, 5, 5}, armnn::DataType::Float32);
1986  const armnn::TensorInfo outputInfo = armnn::TensorInfo({1, 3, 2, 4}, armnn::DataType::Float32);
1987 
1989  desc.m_TargetWidth = 4;
1990  desc.m_TargetHeight = 2;
1991  desc.m_Method = armnn::ResizeMethod::NearestNeighbor;
1992  desc.m_AlignCorners = true;
1993  desc.m_HalfPixelCenters = true;
1994 
1996  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1997  armnn::IConnectableLayer* const resizeLayer = network->AddResizeLayer(desc, layerName.c_str());
1998  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1999 
2000  inputLayer->GetOutputSlot(0).Connect(resizeLayer->GetInputSlot(0));
2001  resizeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2002 
2003  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2004  resizeLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2005 
2006  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2007  CHECK(deserializedNetwork);
2008 
2009  LayerVerifierBaseWithDescriptor<armnn::ResizeDescriptor> verifier(layerName, {inputInfo}, {outputInfo}, desc);
2010  deserializedNetwork->ExecuteStrategy(verifier);
2011 }
2012 
2013 class ResizeBilinearLayerVerifier : public LayerVerifierBaseWithDescriptor<armnn::ResizeDescriptor>
2014 {
2015 public:
2016  ResizeBilinearLayerVerifier(const std::string& layerName,
2017  const std::vector<armnn::TensorInfo>& inputInfos,
2018  const std::vector<armnn::TensorInfo>& outputInfos,
2019  const armnn::ResizeDescriptor& descriptor)
2021  layerName, inputInfos, outputInfos, descriptor) {}
2022 
2023  void ExecuteStrategy(const armnn::IConnectableLayer* layer,
2024  const armnn::BaseDescriptor& descriptor,
2025  const std::vector<armnn::ConstTensor>& constants,
2026  const char* name,
2027  const armnn::LayerBindingId id = 0) override
2028  {
2029  armnn::IgnoreUnused(descriptor, constants, id);
2030  switch (layer->GetType())
2031  {
2032  case armnn::LayerType::Input: break;
2033  case armnn::LayerType::Output: break;
2035  {
2036  VerifyNameAndConnections(layer, name);
2037  const armnn::ResizeDescriptor& layerDescriptor =
2038  static_cast<const armnn::ResizeDescriptor&>(descriptor);
2039  CHECK(layerDescriptor.m_Method == armnn::ResizeMethod::Bilinear);
2040  CHECK(layerDescriptor.m_TargetWidth == m_Descriptor.m_TargetWidth);
2041  CHECK(layerDescriptor.m_TargetHeight == m_Descriptor.m_TargetHeight);
2042  CHECK(layerDescriptor.m_DataLayout == m_Descriptor.m_DataLayout);
2043  CHECK(layerDescriptor.m_AlignCorners == m_Descriptor.m_AlignCorners);
2044  CHECK(layerDescriptor.m_HalfPixelCenters == m_Descriptor.m_HalfPixelCenters);
2045  break;
2046  }
2047  default:
2048  {
2049  throw armnn::Exception("Unexpected layer type in test model. ResizeBiliniar "
2050  "should have translated to Resize");
2051  }
2052  }
2053  }
2054 };
2055 
2056 TEST_CASE("SerializeResizeBilinear")
2057 {
2058  const std::string layerName("resizeBilinear");
2059  const armnn::TensorInfo inputInfo = armnn::TensorInfo({1, 3, 5, 5}, armnn::DataType::Float32);
2060  const armnn::TensorInfo outputInfo = armnn::TensorInfo({1, 3, 2, 4}, armnn::DataType::Float32);
2061 
2064  desc.m_TargetWidth = 4u;
2065  desc.m_TargetHeight = 2u;
2066  desc.m_AlignCorners = true;
2067  desc.m_HalfPixelCenters = true;
2068 
2070  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2071  armnn::IConnectableLayer* const resizeLayer = network->AddResizeLayer(desc, layerName.c_str());
2072  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2073 
2074  inputLayer->GetOutputSlot(0).Connect(resizeLayer->GetInputSlot(0));
2075  resizeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2076 
2077  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2078  resizeLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2079 
2080  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2081  CHECK(deserializedNetwork);
2082 
2083  ResizeBilinearLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, desc);
2084  deserializedNetwork->ExecuteStrategy(verifier);
2085 }
2086 
2087 TEST_CASE("EnsureResizeBilinearBackwardCompatibility")
2088 {
2089  // The hex data below is a flat buffer containing a simple network with an input,
2090  // a ResizeBilinearLayer (now deprecated and removed) and an output
2091  //
2092  // This test verifies that we can still deserialize this old-style model by replacing
2093  // the ResizeBilinearLayer with an equivalent ResizeLayer
2094  const std::vector<uint8_t> resizeBilinearModel =
2095  {
2096  0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00,
2097  0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
2098  0x50, 0x01, 0x00, 0x00, 0x74, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
2099  0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0xD4, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B,
2100  0x04, 0x00, 0x00, 0x00, 0xC2, 0xFE, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x08, 0x00,
2101  0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x8A, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00,
2102  0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00,
2103  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
2104  0x38, 0xFF, 0xFF, 0xFF, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x30, 0xFF, 0xFF, 0xFF, 0x00, 0x00,
2105  0x00, 0x1A, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0E, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00,
2106  0x34, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x12, 0x00, 0x08, 0x00, 0x0C, 0x00,
2107  0x07, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
2108  0x00, 0x00, 0x0E, 0x00, 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x14, 0x00, 0x0E, 0x00,
2109  0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x19, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00,
2110  0x20, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x72, 0x65, 0x73, 0x69, 0x7A, 0x65, 0x42, 0x69, 0x6C, 0x69,
2111  0x6E, 0x65, 0x61, 0x72, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x48, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
2112  0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00,
2113  0x00, 0x00, 0x52, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
2114  0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x04, 0x00,
2115  0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
2116  0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
2117  0x00, 0x09, 0x04, 0x00, 0x00, 0x00, 0xF6, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00,
2118  0x0A, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x14, 0x00,
2119  0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,
2120  0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
2121  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0A, 0x00,
2122  0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00,
2123  0x08, 0x00, 0x07, 0x00, 0x0C, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00,
2124  0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x05, 0x00,
2125  0x00, 0x00, 0x05, 0x00, 0x00, 0x00
2126  };
2127 
2128  armnn::INetworkPtr deserializedNetwork =
2129  DeserializeNetwork(std::string(resizeBilinearModel.begin(), resizeBilinearModel.end()));
2130  CHECK(deserializedNetwork);
2131 
2132  const armnn::TensorInfo inputInfo = armnn::TensorInfo({1, 3, 5, 5}, armnn::DataType::Float32);
2133  const armnn::TensorInfo outputInfo = armnn::TensorInfo({1, 3, 2, 4}, armnn::DataType::Float32);
2134 
2135  armnn::ResizeDescriptor descriptor;
2136  descriptor.m_TargetWidth = 4u;
2137  descriptor.m_TargetHeight = 2u;
2138 
2139  ResizeBilinearLayerVerifier verifier("resizeBilinear", { inputInfo }, { outputInfo }, descriptor);
2140  deserializedNetwork->ExecuteStrategy(verifier);
2141 }
2142 
2143 TEST_CASE("SerializeShape")
2144 {
2145  const std::string layerName("shape");
2146  const armnn::TensorInfo inputInfo({1, 3, 3, 1}, armnn::DataType::Signed32);
2147  const armnn::TensorInfo outputInfo({ 4 }, armnn::DataType::Signed32);
2148 
2150  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2151  armnn::IConnectableLayer* const shapeLayer = network->AddShapeLayer(layerName.c_str());
2152  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2153 
2154  inputLayer->GetOutputSlot(0).Connect(shapeLayer->GetInputSlot(0));
2155  shapeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2156 
2157  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2158  shapeLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2159 
2160  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2161  CHECK(deserializedNetwork);
2162 
2163  LayerVerifierBase verifier(layerName, {inputInfo}, {outputInfo});
2164 
2165  deserializedNetwork->ExecuteStrategy(verifier);
2166 }
2167 
2168 TEST_CASE("SerializeSlice")
2169 {
2170  const std::string layerName{"slice"};
2171 
2172  const armnn::TensorInfo inputInfo = armnn::TensorInfo({3, 2, 3, 1}, armnn::DataType::Float32);
2173  const armnn::TensorInfo outputInfo = armnn::TensorInfo({2, 2, 2, 1}, armnn::DataType::Float32);
2174 
2175  armnn::SliceDescriptor descriptor({ 0, 0, 1, 0}, {2, 2, 2, 1});
2176 
2178 
2179  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2180  armnn::IConnectableLayer* const sliceLayer = network->AddSliceLayer(descriptor, layerName.c_str());
2181  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2182 
2183  inputLayer->GetOutputSlot(0).Connect(sliceLayer->GetInputSlot(0));
2184  sliceLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2185 
2186  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2187  sliceLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2188 
2189  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2190  CHECK(deserializedNetwork);
2191 
2192  LayerVerifierBaseWithDescriptor<armnn::SliceDescriptor> verifier(layerName, {inputInfo}, {outputInfo}, descriptor);
2193  deserializedNetwork->ExecuteStrategy(verifier);
2194 }
2195 
2196 TEST_CASE("SerializeSoftmax")
2197 {
2198  const std::string layerName("softmax");
2199  const armnn::TensorInfo info({1, 10}, armnn::DataType::Float32);
2200 
2201  armnn::SoftmaxDescriptor descriptor;
2202  descriptor.m_Beta = 1.0f;
2203 
2205  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2206  armnn::IConnectableLayer* const softmaxLayer = network->AddSoftmaxLayer(descriptor, layerName.c_str());
2207  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2208 
2209  inputLayer->GetOutputSlot(0).Connect(softmaxLayer->GetInputSlot(0));
2210  softmaxLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2211 
2212  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
2213  softmaxLayer->GetOutputSlot(0).SetTensorInfo(info);
2214 
2215  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2216  CHECK(deserializedNetwork);
2217 
2218  LayerVerifierBaseWithDescriptor<armnn::SoftmaxDescriptor> verifier(layerName, {info}, {info}, descriptor);
2219  deserializedNetwork->ExecuteStrategy(verifier);
2220 }
2221 
2222 TEST_CASE("SerializeSpaceToBatchNd")
2223 {
2224  const std::string layerName("spaceToBatchNd");
2225  const armnn::TensorInfo inputInfo({2, 1, 2, 4}, armnn::DataType::Float32);
2226  const armnn::TensorInfo outputInfo({8, 1, 1, 3}, armnn::DataType::Float32);
2227 
2230  desc.m_BlockShape = {2, 2};
2231  desc.m_PadList = {{0, 0}, {2, 0}};
2232 
2234  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2235  armnn::IConnectableLayer* const spaceToBatchNdLayer = network->AddSpaceToBatchNdLayer(desc, layerName.c_str());
2236  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2237 
2238  inputLayer->GetOutputSlot(0).Connect(spaceToBatchNdLayer->GetInputSlot(0));
2239  spaceToBatchNdLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2240 
2241  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2242  spaceToBatchNdLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2243 
2244  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2245  CHECK(deserializedNetwork);
2246 
2248  layerName, {inputInfo}, {outputInfo}, desc);
2249  deserializedNetwork->ExecuteStrategy(verifier);
2250 }
2251 
2252 TEST_CASE("SerializeSpaceToDepth")
2253 {
2254  const std::string layerName("spaceToDepth");
2255 
2256  const armnn::TensorInfo inputInfo ({ 1, 16, 8, 3 }, armnn::DataType::Float32);
2257  const armnn::TensorInfo outputInfo({ 1, 8, 4, 12 }, armnn::DataType::Float32);
2258 
2260  desc.m_BlockSize = 2;
2261  desc.m_DataLayout = armnn::DataLayout::NHWC;
2262 
2264  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2265  armnn::IConnectableLayer* const spaceToDepthLayer = network->AddSpaceToDepthLayer(desc, layerName.c_str());
2266  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2267 
2268  inputLayer->GetOutputSlot(0).Connect(spaceToDepthLayer->GetInputSlot(0));
2269  spaceToDepthLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2270 
2271  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2272  spaceToDepthLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2273 
2274  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2275  CHECK(deserializedNetwork);
2276 
2278  layerName, {inputInfo}, {outputInfo}, desc);
2279  deserializedNetwork->ExecuteStrategy(verifier);
2280 }
2281 
2282 TEST_CASE("SerializeSplitter")
2283 {
2284  const unsigned int numViews = 3;
2285  const unsigned int numDimensions = 4;
2286  const unsigned int inputShape[] = {1, 18, 4, 4};
2287  const unsigned int outputShape[] = {1, 6, 4, 4};
2288 
2289  // This is modelled on how the caffe parser sets up a splitter layer to partition an input along dimension one.
2290  unsigned int splitterDimSizes[4] = {static_cast<unsigned int>(inputShape[0]),
2291  static_cast<unsigned int>(inputShape[1]),
2292  static_cast<unsigned int>(inputShape[2]),
2293  static_cast<unsigned int>(inputShape[3])};
2294  splitterDimSizes[1] /= numViews;
2295  armnn::ViewsDescriptor desc(numViews, numDimensions);
2296 
2297  for (unsigned int g = 0; g < numViews; ++g)
2298  {
2299  desc.SetViewOriginCoord(g, 1, splitterDimSizes[1] * g);
2300 
2301  for (unsigned int dimIdx=0; dimIdx < 4; dimIdx++)
2302  {
2303  desc.SetViewSize(g, dimIdx, splitterDimSizes[dimIdx]);
2304  }
2305  }
2306 
2307  const std::string layerName("splitter");
2308  const armnn::TensorInfo inputInfo(numDimensions, inputShape, armnn::DataType::Float32);
2309  const armnn::TensorInfo outputInfo(numDimensions, outputShape, armnn::DataType::Float32);
2310 
2312  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2313  armnn::IConnectableLayer* const splitterLayer = network->AddSplitterLayer(desc, layerName.c_str());
2314  armnn::IConnectableLayer* const outputLayer0 = network->AddOutputLayer(0);
2315  armnn::IConnectableLayer* const outputLayer1 = network->AddOutputLayer(1);
2316  armnn::IConnectableLayer* const outputLayer2 = network->AddOutputLayer(2);
2317 
2318  inputLayer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0));
2319  splitterLayer->GetOutputSlot(0).Connect(outputLayer0->GetInputSlot(0));
2320  splitterLayer->GetOutputSlot(1).Connect(outputLayer1->GetInputSlot(0));
2321  splitterLayer->GetOutputSlot(2).Connect(outputLayer2->GetInputSlot(0));
2322 
2323  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2324  splitterLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2325  splitterLayer->GetOutputSlot(1).SetTensorInfo(outputInfo);
2326  splitterLayer->GetOutputSlot(2).SetTensorInfo(outputInfo);
2327 
2328  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2329  CHECK(deserializedNetwork);
2330 
2332  layerName, {inputInfo}, {outputInfo, outputInfo, outputInfo}, desc);
2333  deserializedNetwork->ExecuteStrategy(verifier);
2334 }
2335 
2336 TEST_CASE("SerializeStack")
2337 {
2338  const std::string layerName("stack");
2339 
2340  armnn::TensorInfo inputTensorInfo ({4, 3, 5}, armnn::DataType::Float32);
2341  armnn::TensorInfo outputTensorInfo({4, 3, 2, 5}, armnn::DataType::Float32);
2342 
2343  armnn::StackDescriptor descriptor(2, 2, {4, 3, 5});
2344 
2346  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(0);
2347  armnn::IConnectableLayer* const inputLayer2 = network->AddInputLayer(1);
2348  armnn::IConnectableLayer* const stackLayer = network->AddStackLayer(descriptor, layerName.c_str());
2349  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2350 
2351  inputLayer1->GetOutputSlot(0).Connect(stackLayer->GetInputSlot(0));
2352  inputLayer2->GetOutputSlot(0).Connect(stackLayer->GetInputSlot(1));
2353  stackLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2354 
2355  inputLayer1->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
2356  inputLayer2->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
2357  stackLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2358 
2359  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2360  CHECK(deserializedNetwork);
2361 
2363  layerName, {inputTensorInfo, inputTensorInfo}, {outputTensorInfo}, descriptor);
2364  deserializedNetwork->ExecuteStrategy(verifier);
2365 }
2366 
2367 TEST_CASE("SerializeStandIn")
2368 {
2369  const std::string layerName("standIn");
2370 
2371  armnn::TensorInfo tensorInfo({ 1u }, armnn::DataType::Float32);
2372  armnn::StandInDescriptor descriptor(2u, 2u);
2373 
2375  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
2376  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
2377  armnn::IConnectableLayer* const standInLayer = network->AddStandInLayer(descriptor, layerName.c_str());
2378  armnn::IConnectableLayer* const outputLayer0 = network->AddOutputLayer(0);
2379  armnn::IConnectableLayer* const outputLayer1 = network->AddOutputLayer(1);
2380 
2381  inputLayer0->GetOutputSlot(0).Connect(standInLayer->GetInputSlot(0));
2382  inputLayer0->GetOutputSlot(0).SetTensorInfo(tensorInfo);
2383 
2384  inputLayer1->GetOutputSlot(0).Connect(standInLayer->GetInputSlot(1));
2385  inputLayer1->GetOutputSlot(0).SetTensorInfo(tensorInfo);
2386 
2387  standInLayer->GetOutputSlot(0).Connect(outputLayer0->GetInputSlot(0));
2388  standInLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
2389 
2390  standInLayer->GetOutputSlot(1).Connect(outputLayer1->GetInputSlot(0));
2391  standInLayer->GetOutputSlot(1).SetTensorInfo(tensorInfo);
2392 
2393  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2394  CHECK(deserializedNetwork);
2395 
2397  layerName, { tensorInfo, tensorInfo }, { tensorInfo, tensorInfo }, descriptor);
2398  deserializedNetwork->ExecuteStrategy(verifier);
2399 }
2400 
2401 TEST_CASE("SerializeStridedSlice")
2402 {
2403  const std::string layerName("stridedSlice");
2404  const armnn::TensorInfo inputInfo = armnn::TensorInfo({3, 2, 3, 1}, armnn::DataType::Float32);
2405  const armnn::TensorInfo outputInfo = armnn::TensorInfo({3, 1}, armnn::DataType::Float32);
2406 
2407  armnn::StridedSliceDescriptor desc({0, 0, 1, 0}, {1, 1, 1, 1}, {1, 1, 1, 1});
2408  desc.m_EndMask = (1 << 4) - 1;
2409  desc.m_ShrinkAxisMask = (1 << 1) | (1 << 2);
2410  desc.m_DataLayout = armnn::DataLayout::NCHW;
2411 
2413  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2414  armnn::IConnectableLayer* const stridedSliceLayer = network->AddStridedSliceLayer(desc, layerName.c_str());
2415  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2416 
2417  inputLayer->GetOutputSlot(0).Connect(stridedSliceLayer->GetInputSlot(0));
2418  stridedSliceLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2419 
2420  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2421  stridedSliceLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2422 
2423  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2424  CHECK(deserializedNetwork);
2425 
2427  layerName, {inputInfo}, {outputInfo}, desc);
2428  deserializedNetwork->ExecuteStrategy(verifier);
2429 }
2430 
2431 TEST_CASE("SerializeSubtraction")
2432 {
2433  const std::string layerName("subtraction");
2434  const armnn::TensorInfo info({ 1, 4 }, armnn::DataType::Float32);
2435 
2437  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
2438  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
2439  armnn::IConnectableLayer* const subtractionLayer = network->AddSubtractionLayer(layerName.c_str());
2440  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2441 
2442  inputLayer0->GetOutputSlot(0).Connect(subtractionLayer->GetInputSlot(0));
2443  inputLayer1->GetOutputSlot(0).Connect(subtractionLayer->GetInputSlot(1));
2444  subtractionLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2445 
2446  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
2447  inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
2448  subtractionLayer->GetOutputSlot(0).SetTensorInfo(info);
2449 
2450  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2451  CHECK(deserializedNetwork);
2452 
2453  LayerVerifierBase verifier(layerName, {info, info}, {info});
2454  deserializedNetwork->ExecuteStrategy(verifier);
2455 }
2456 
2457 TEST_CASE("SerializeSwitch")
2458 {
2459  class SwitchLayerVerifier : public LayerVerifierBase
2460  {
2461  public:
2462  SwitchLayerVerifier(const std::string& layerName,
2463  const std::vector<armnn::TensorInfo>& inputInfos,
2464  const std::vector<armnn::TensorInfo>& outputInfos)
2465  : LayerVerifierBase(layerName, inputInfos, outputInfos) {}
2466 
2467  void ExecuteStrategy(const armnn::IConnectableLayer* layer,
2468  const armnn::BaseDescriptor& descriptor,
2469  const std::vector<armnn::ConstTensor>& constants,
2470  const char* name,
2471  const armnn::LayerBindingId id = 0) override
2472  {
2473  armnn::IgnoreUnused(descriptor, constants, id);
2474  switch (layer->GetType())
2475  {
2476  case armnn::LayerType::Input: break;
2477  case armnn::LayerType::Output: break;
2478  case armnn::LayerType::Constant: break;
2480  {
2481  VerifyNameAndConnections(layer, name);
2482  break;
2483  }
2484  default:
2485  {
2486  throw armnn::Exception("Unexpected layer type in Switch test model");
2487  }
2488  }
2489  }
2490  };
2491 
2492  const std::string layerName("switch");
2493  const armnn::TensorInfo info({ 1, 4 }, armnn::DataType::Float32, 0.0f, 0, true);
2494 
2495  std::vector<float> constantData = GenerateRandomData<float>(info.GetNumElements());
2496  armnn::ConstTensor constTensor(info, constantData);
2497 
2499  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2500  armnn::IConnectableLayer* const constantLayer = network->AddConstantLayer(constTensor, "constant");
2501  armnn::IConnectableLayer* const switchLayer = network->AddSwitchLayer(layerName.c_str());
2502  armnn::IConnectableLayer* const trueOutputLayer = network->AddOutputLayer(0);
2503  armnn::IConnectableLayer* const falseOutputLayer = network->AddOutputLayer(1);
2504 
2505  inputLayer->GetOutputSlot(0).Connect(switchLayer->GetInputSlot(0));
2506  constantLayer->GetOutputSlot(0).Connect(switchLayer->GetInputSlot(1));
2507  switchLayer->GetOutputSlot(0).Connect(trueOutputLayer->GetInputSlot(0));
2508  switchLayer->GetOutputSlot(1).Connect(falseOutputLayer->GetInputSlot(0));
2509 
2510  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
2511  constantLayer->GetOutputSlot(0).SetTensorInfo(info);
2512  switchLayer->GetOutputSlot(0).SetTensorInfo(info);
2513  switchLayer->GetOutputSlot(1).SetTensorInfo(info);
2514 
2515  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2516  CHECK(deserializedNetwork);
2517 
2518  SwitchLayerVerifier verifier(layerName, {info, info}, {info, info});
2519  deserializedNetwork->ExecuteStrategy(verifier);
2520 }
2521 
2522 TEST_CASE("SerializeTranspose")
2523 {
2524  const std::string layerName("transpose");
2525  const armnn::TensorInfo inputTensorInfo({4, 3, 2, 1}, armnn::DataType::Float32);
2526  const armnn::TensorInfo outputTensorInfo({1, 2, 3, 4}, armnn::DataType::Float32);
2527 
2528  armnn::TransposeDescriptor descriptor(armnn::PermutationVector({3, 2, 1, 0}));
2529 
2531  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2532  armnn::IConnectableLayer* const transposeLayer = network->AddTransposeLayer(descriptor, layerName.c_str());
2533  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2534 
2535  inputLayer->GetOutputSlot(0).Connect(transposeLayer->GetInputSlot(0));
2536  transposeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2537 
2538  inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
2539  transposeLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2540 
2541  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2542  CHECK(deserializedNetwork);
2543 
2545  layerName, {inputTensorInfo}, {outputTensorInfo}, descriptor);
2546  deserializedNetwork->ExecuteStrategy(verifier);
2547 }
2548 
2549 TEST_CASE("SerializeTransposeConvolution2d")
2550 {
2551  const std::string layerName("transposeConvolution2d");
2552  const armnn::TensorInfo inputInfo ({ 1, 7, 7, 1 }, armnn::DataType::Float32);
2553  const armnn::TensorInfo outputInfo({ 1, 9, 9, 1 }, armnn::DataType::Float32);
2554 
2555  const armnn::TensorInfo weightsInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32, 0.0f, 0, true);
2556  const armnn::TensorInfo biasesInfo ({ 1 }, armnn::DataType::Float32, 0.0f, 0, true);
2557 
2558  std::vector<float> weightsData = GenerateRandomData<float>(weightsInfo.GetNumElements());
2559  armnn::ConstTensor weights(weightsInfo, weightsData);
2560 
2561  std::vector<float> biasesData = GenerateRandomData<float>(biasesInfo.GetNumElements());
2562  armnn::ConstTensor biases(biasesInfo, biasesData);
2563 
2565  descriptor.m_PadLeft = 1;
2566  descriptor.m_PadRight = 1;
2567  descriptor.m_PadTop = 1;
2568  descriptor.m_PadBottom = 1;
2569  descriptor.m_StrideX = 1;
2570  descriptor.m_StrideY = 1;
2571  descriptor.m_BiasEnabled = true;
2573 
2575  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2576  armnn::IConnectableLayer* const convLayer =
2577  network->AddTransposeConvolution2dLayer(descriptor,
2578  weights,
2580  layerName.c_str());
2581  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2582 
2583  inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
2584  convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2585 
2586  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2587  convLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2588 
2589  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2590  CHECK(deserializedNetwork);
2591 
2592  const std::vector<armnn::ConstTensor> constants {weights, biases};
2594  layerName, {inputInfo}, {outputInfo}, descriptor, constants);
2595  deserializedNetwork->ExecuteStrategy(verifier);
2596 }
2597 
2598 TEST_CASE("SerializeDeserializeNonLinearNetwork")
2599 {
2600  class ConstantLayerVerifier : public LayerVerifierBase
2601  {
2602  public:
2603  ConstantLayerVerifier(const std::string& layerName,
2604  const std::vector<armnn::TensorInfo>& inputInfos,
2605  const std::vector<armnn::TensorInfo>& outputInfos,
2606  const armnn::ConstTensor& layerInput)
2607  : LayerVerifierBase(layerName, inputInfos, outputInfos)
2608  , m_LayerInput(layerInput) {}
2609 
2610  void ExecuteStrategy(const armnn::IConnectableLayer* layer,
2611  const armnn::BaseDescriptor& descriptor,
2612  const std::vector<armnn::ConstTensor>& constants,
2613  const char* name,
2614  const armnn::LayerBindingId id = 0) override
2615  {
2616  armnn::IgnoreUnused(descriptor, constants, id);
2617  switch (layer->GetType())
2618  {
2619  case armnn::LayerType::Input: break;
2620  case armnn::LayerType::Output: break;
2621  case armnn::LayerType::Addition: break;
2623  {
2624  VerifyNameAndConnections(layer, name);
2625  CompareConstTensor(constants.at(0), m_LayerInput);
2626  break;
2627  }
2628  default:
2629  {
2630  throw armnn::Exception("Unexpected layer type in test model");
2631  }
2632  }
2633  }
2634 
2635  private:
2636  armnn::ConstTensor m_LayerInput;
2637  };
2638 
2639  const std::string layerName("constant");
2640  const armnn::TensorInfo info({ 2, 3 }, armnn::DataType::Float32, 0.0f, 0, true);
2641 
2642  std::vector<float> constantData = GenerateRandomData<float>(info.GetNumElements());
2643  armnn::ConstTensor constTensor(info, constantData);
2644 
2646  armnn::IConnectableLayer* input = network->AddInputLayer(0);
2647  armnn::IConnectableLayer* add = network->AddAdditionLayer();
2648  armnn::IConnectableLayer* constant = network->AddConstantLayer(constTensor, layerName.c_str());
2649  armnn::IConnectableLayer* output = network->AddOutputLayer(0);
2650 
2651  input->GetOutputSlot(0).Connect(add->GetInputSlot(0));
2652  constant->GetOutputSlot(0).Connect(add->GetInputSlot(1));
2653  add->GetOutputSlot(0).Connect(output->GetInputSlot(0));
2654 
2655  input->GetOutputSlot(0).SetTensorInfo(info);
2656  constant->GetOutputSlot(0).SetTensorInfo(info);
2657  add->GetOutputSlot(0).SetTensorInfo(info);
2658 
2659  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2660  CHECK(deserializedNetwork);
2661 
2662  ConstantLayerVerifier verifier(layerName, {}, {info}, constTensor);
2663  deserializedNetwork->ExecuteStrategy(verifier);
2664 }
2665 
2666 }
uint32_t m_PadBottom
Padding bottom value in the height dimension.
bool m_BiasEnabled
Enable/disable bias.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
A ViewsDescriptor for the SplitterLayer.
Interface for a layer that is connectable to other layers via InputSlots and OutputSlots.
Definition: INetwork.hpp:66
float m_ScaleW
Center size encoding scale weight.
bool m_BiasEnabled
Enable/disable bias.
A TransposeConvolution2dDescriptor for the TransposeConvolution2dLayer.
const TensorShape & GetShape() const
Definition: Tensor.hpp:191
uint32_t m_PadBottom
Padding bottom value in the height dimension.
A ReshapeDescriptor for the ReshapeLayer.
armnn::INetworkPtr DeserializeNetwork(const std::string &serializerString)
uint32_t m_PadBack
Padding back value in the depth dimension.
#define ARMNN_NO_DEPRECATE_WARN_BEGIN
Definition: Deprecated.hpp:33
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
A ComparisonDescriptor for the ComparisonLayer.
Definition: Descriptors.hpp:89
float m_ScaleX
Center size encoding scale x.
bool m_TransposeWeightMatrix
Enable/disable transpose weight matrix.
uint32_t m_StrideX
Stride value when proceeding through input for the width dimension.
A Convolution2dDescriptor for the Convolution2dLayer.
uint32_t m_PadLeft
Padding left value in the width dimension.
bool m_BiasEnabled
Enable/disable bias.
ResizeMethod m_Method
The Interpolation method to use (Bilinear, NearestNeighbor).
float m_Gamma
Gamma, the scale scalar value applied for the normalized tensor. Defaults to 1.0. ...
float m_Beta
Exponentiation value.
The padding fields don&#39;t count and are ignored.
float m_Eps
Value to add to the variance. Used to avoid dividing by zero.
ArgMinMaxFunction m_Function
Specify if the function is to find Min or Max.
Definition: Descriptors.hpp:81
uint32_t m_DetectionsPerClass
Detections per classes, used in Regular NMS.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
uint32_t m_PadBottom
Padding bottom value in the height dimension.
bool m_BiasEnabled
Enable/disable bias.
A LogicalBinaryDescriptor for the LogicalBinaryLayer.
uint32_t m_PadRight
Padding right value in the width dimension.
void ExecuteStrategy(const armnn::IConnectableLayer *layer, const armnn::BaseDescriptor &descriptor, const std::vector< armnn::ConstTensor > &constants, const char *name, const armnn::LayerBindingId id=0) override
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
Copyright (c) 2021 ARM Limited and Contributors.
DataLayout m_DataLayout
The data layout to be used (NCDHW, NDHWC).
void IgnoreUnused(Ts &&...)
uint32_t m_PadBottom
Padding bottom value in the height dimension.
uint32_t m_DilationY
Dilation along y axis.
int32_t m_EndMask
End mask value.
A SpaceToDepthDescriptor for the SpaceToDepthLayer.
uint32_t m_DilationX
Dilation along x axis.
uint32_t m_DilationY
Dilation factor value for height dimension.
A BatchToSpaceNdDescriptor for the BatchToSpaceNdLayer.
int LayerBindingId
Type of identifiers for bindable layers (inputs, outputs).
Definition: Types.hpp:277
virtual void SetTensorInfo(const TensorInfo &tensorInfo)=0
uint32_t m_StrideY
Stride value when proceeding through input for the height dimension.
A ResizeBilinearDescriptor for the ResizeBilinearLayer.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
uint32_t m_MaxClassesPerDetection
Maximum numbers of classes per detection, used in Fast NMS.
Base class for all descriptors.
Definition: Descriptors.hpp:22
std::vector< unsigned int > m_Axis
Values for the dimensions to reduce.
A StackDescriptor for the StackLayer.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
constexpr char const * GetUnaryOperationAsCString(UnaryOperation operation)
Definition: TypesUtils.hpp:71
uint32_t m_PadTop
Padding top value in the height dimension.
uint32_t m_MaxDetections
Maximum numbers of detections.
A PadDescriptor for the PadLayer.
uint32_t m_StrideX
Stride value when proceeding through input for the width dimension.
uint32_t m_StrideX
Stride value when proceeding through input for the width dimension.
DataType
Definition: Types.hpp:35
float m_NmsIouThreshold
Intersection over union threshold.
#define ARMNN_NO_DEPRECATE_WARN_END
Definition: Deprecated.hpp:34
uint32_t m_DilationX
Dilation factor value for width dimension.
uint32_t m_PadTop
Padding top value in the height dimension.
void ExecuteStrategy(const armnn::IConnectableLayer *layer, const armnn::BaseDescriptor &descriptor, const std::vector< armnn::ConstTensor > &constants, const char *name, const armnn::LayerBindingId id=0) override
A L2NormalizationDescriptor for the L2NormalizationLayer.
void VerifyNameAndConnections(const armnn::IConnectableLayer *layer, const char *name)
An ArgMinMaxDescriptor for ArgMinMaxLayer.
Definition: Descriptors.hpp:67
An OriginsDescriptor for the ConcatLayer.
A ReduceDescriptor for the REDUCE operators.
A FullyConnectedDescriptor for the FullyConnectedLayer.
bool m_BiasEnabled
Enable/disable bias.
A tensor defined by a TensorInfo (shape and data type) and an immutable backing store.
Definition: Tensor.hpp:327
uint32_t m_TargetWidth
Target width value.
A GatherDescriptor for the GatherLayer.
uint32_t m_NumClasses
Number of classes.
bool m_HalfPixelCenters
Half Pixel Centers.
uint32_t m_PadTop
Padding top value in the height dimension.
void SetQuantizationScale(float scale)
Definition: Tensor.cpp:475
A StandInDescriptor for the StandIn layer.
bool m_UseRegularNms
Use Regular NMS.
uint32_t m_PadFront
Padding front value in the depth dimension.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
uint32_t m_PadLeft
Padding left value in the width dimension.
uint32_t m_TargetHeight
Target height value.
A SliceDescriptor for the SliceLayer.
uint32_t m_StrideY
Stride value when proceeding through input for the height dimension.
A Convolution3dDescriptor for the Convolution3dLayer.
uint32_t m_PadRight
Padding right value in the width dimension.
virtual LayerType GetType() const =0
Returns the armnn::LayerType of this layer.
unsigned int m_BlockSize
Scalar specifying the input block size. It must be >= 1.
PaddingMode m_PaddingMode
Specifies the Padding mode (Constant, Reflect or Symmetric)
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
A Pooling3dDescriptor for the Pooling3dLayer.
std::vector< uint32_t > m_vAxis
The indices of the dimensions to reduce.
float m_ScaleH
Center size encoding scale height.
A SpaceToBatchNdDescriptor for the SpaceToBatchNdLayer.
DataLayout m_DataLayout
The data layout to be used (NDHWC, NCDHW).
uint32_t m_DilationX
Dilation along x axis.
uint32_t m_PadLeft
Padding left value in the width dimension.
EmptyOptional is used to initialize the Optional class in case we want to have default value for an O...
Definition: Optional.hpp:32
bool m_AlignCorners
Aligned corners.
uint32_t m_StrideX
Stride value when proceeding through input for the width dimension.
int32_t m_Axis
The axis in params to gather indices from.
A ElementwiseUnaryDescriptor for the ElementwiseUnaryLayer.
uint32_t m_StrideY
Stride value when proceeding through input for the height dimension.
uint32_t m_StrideY
Stride value when proceeding through input for the height dimension.
Base class for all ArmNN exceptions so that users can filter to just those.
Definition: Exceptions.hpp:46
uint32_t m_PadTop
Padding top value in the height dimension.
void CompareConstTensor(const armnn::ConstTensor &tensor1, const armnn::ConstTensor &tensor2)
virtual const IInputSlot & GetInputSlot(unsigned int index) const =0
Get a const input slot handle by slot index.
A MeanDescriptor for the MeanLayer.
UnaryOperation
Definition: Types.hpp:111
uint32_t m_PadRight
Padding right value in the width dimension.
A TransposeDescriptor for the TransposeLayer.
A StridedSliceDescriptor for the StridedSliceLayer.
virtual const IOutputSlot & GetOutputSlot(unsigned int index) const =0
Get the const output slot handle by slot index.
void SetQuantizationOffset(int32_t offset)
Definition: Tensor.cpp:491
float m_ScaleY
Center size encoding scale y.
OriginsDescriptor CreateDescriptorForConcatenation(TensorShapeIt first, TensorShapeIt last, unsigned int concatenationDimension)
Convenience template to create an OriginsDescriptor to use when creating a ConcatLayer for performing...
float m_NmsScoreThreshold
NMS score threshold.
std::unique_ptr< INetwork, void(*)(INetwork *network)> INetworkPtr
Definition: INetwork.hpp:241
virtual int Connect(IInputSlot &destination)=0
A Pooling2dDescriptor for the Pooling2dLayer.
std::string SerializeNetwork(const armnn::INetwork &network)
A NormalizationDescriptor for the NormalizationLayer.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
An InstanceNormalizationDescriptor for InstanceNormalizationLayer.
A ChannelShuffleDescriptor for the ChannelShuffle operator.
static INetworkPtr Create(NetworkOptions networkOptions={})
Definition: Network.cpp:492
uint32_t m_DilationZ
Dilation along z axis.
A SoftmaxDescriptor for the SoftmaxLayer.
uint32_t m_StrideZ
Stride value when proceeding through input for the depth dimension.
A DepthwiseConvolution2dDescriptor for the DepthwiseConvolution2dLayer.
uint32_t m_DilationY
Dilation along y axis.
A FillDescriptor for the FillLayer.
A BatchNormalizationDescriptor for the BatchNormalizationLayer.
uint32_t m_PadLeft
Padding left value in the width dimension.
void ExecuteStrategy(const armnn::IConnectableLayer *layer, const armnn::BaseDescriptor &descriptor, const std::vector< armnn::ConstTensor > &constants, const char *name, const armnn::LayerBindingId id=0) override
A PermuteDescriptor for the PermuteLayer.
uint32_t m_PadRight
Padding right value in the width dimension.
bool m_ConstantWeights
Enable/disable constant weights and biases.