SerializerTests.cpp
1 //
2 // Copyright © 2017 Arm Ltd. All rights reserved.
3 // SPDX-License-Identifier: MIT
4 //
5 
6 #include "../Serializer.hpp"
7 
8 #include <armnn/Descriptors.hpp>
9 #include <armnn/INetwork.hpp>
10 #include <armnn/TypesUtils.hpp>
11 #include <armnn/LstmParams.hpp>
13 #include <armnnDeserializer/IDeserializer.hpp>
14 
15 #include <random>
16 #include <vector>
17 
18 #include <boost/test/unit_test.hpp>
19 
20 using armnnDeserializer::IDeserializer;
21 
22 namespace
23 {
24 
25 #define DECLARE_LAYER_VERIFIER_CLASS(name) \
26 class name##LayerVerifier : public LayerVerifierBase \
27 { \
28 public: \
29  name##LayerVerifier(const std::string& layerName, \
30  const std::vector<armnn::TensorInfo>& inputInfos, \
31  const std::vector<armnn::TensorInfo>& outputInfos) \
32  : LayerVerifierBase(layerName, inputInfos, outputInfos) {} \
33 \
34  void Visit##name##Layer(const armnn::IConnectableLayer* layer, const char* name) override \
35  { \
36  VerifyNameAndConnections(layer, name); \
37  } \
38 };
39 
40 #define DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(name) \
41 class name##LayerVerifier : public LayerVerifierBaseWithDescriptor<armnn::name##Descriptor> \
42 { \
43 public: \
44  name##LayerVerifier(const std::string& layerName, \
45  const std::vector<armnn::TensorInfo>& inputInfos, \
46  const std::vector<armnn::TensorInfo>& outputInfos, \
47  const armnn::name##Descriptor& descriptor) \
48  : LayerVerifierBaseWithDescriptor<armnn::name##Descriptor>( \
49  layerName, inputInfos, outputInfos, descriptor) {} \
50 \
51  void Visit##name##Layer(const armnn::IConnectableLayer* layer, \
52  const armnn::name##Descriptor& descriptor, \
53  const char* name) override \
54  { \
55  VerifyNameAndConnections(layer, name); \
56  VerifyDescriptor(descriptor); \
57  } \
58 };
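// The two macros above stamp out minimal verifier classes: for example,
// DECLARE_LAYER_VERIFIER_CLASS(Addition) defines an AdditionLayerVerifier whose
// VisitAdditionLayer override checks the layer name and its connections, and the
// _WITH_DESCRIPTOR variant additionally compares the deserialized descriptor
// against the one used when the network was built.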
59 
60 struct DefaultLayerVerifierPolicy
61 {
62  static void Apply(const std::string)
63  {
64  BOOST_TEST_MESSAGE("Unexpected layer found in network");
65  BOOST_TEST(false);
66  }
67 };
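// LayerVisitorBase calls this policy from every Visit function that a derived
// verifier does not override, so visiting an unexpected layer type in a
// deserialized network fails the test immediately.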
68 
69 class LayerVerifierBase : public armnn::LayerVisitorBase<DefaultLayerVerifierPolicy>
70 {
71 public:
72  LayerVerifierBase(const std::string& layerName,
73  const std::vector<armnn::TensorInfo>& inputInfos,
74  const std::vector<armnn::TensorInfo>& outputInfos)
75  : m_LayerName(layerName)
76  , m_InputTensorInfos(inputInfos)
77  , m_OutputTensorInfos(outputInfos) {}
78 
79  void VisitInputLayer(const armnn::IConnectableLayer*, armnn::LayerBindingId, const char*) override {}
80 
81  void VisitOutputLayer(const armnn::IConnectableLayer*, armnn::LayerBindingId, const char*) override {}
82 
83 protected:
84  void VerifyNameAndConnections(const armnn::IConnectableLayer* layer, const char* name)
85  {
86  BOOST_TEST(name == m_LayerName.c_str());
87 
88  BOOST_TEST(layer->GetNumInputSlots() == m_InputTensorInfos.size());
89  BOOST_TEST(layer->GetNumOutputSlots() == m_OutputTensorInfos.size());
90 
91  for (unsigned int i = 0; i < m_InputTensorInfos.size(); i++)
92  {
93  const armnn::IOutputSlot* connectedOutput = layer->GetInputSlot(i).GetConnection();
94  BOOST_CHECK(connectedOutput);
95 
96  const armnn::TensorInfo& connectedInfo = connectedOutput->GetTensorInfo();
97  BOOST_TEST(connectedInfo.GetShape() == m_InputTensorInfos[i].GetShape());
98  BOOST_TEST(
99  GetDataTypeName(connectedInfo.GetDataType()) == GetDataTypeName(m_InputTensorInfos[i].GetDataType()));
100 
101  BOOST_TEST(connectedInfo.GetQuantizationScale() == m_InputTensorInfos[i].GetQuantizationScale());
102  BOOST_TEST(connectedInfo.GetQuantizationOffset() == m_InputTensorInfos[i].GetQuantizationOffset());
103  }
104 
105  for (unsigned int i = 0; i < m_OutputTensorInfos.size(); i++)
106  {
107  const armnn::TensorInfo& outputInfo = layer->GetOutputSlot(i).GetTensorInfo();
108  BOOST_TEST(outputInfo.GetShape() == m_OutputTensorInfos[i].GetShape());
109  BOOST_TEST(
110  GetDataTypeName(outputInfo.GetDataType()) == GetDataTypeName(m_OutputTensorInfos[i].GetDataType()));
111 
112  BOOST_TEST(outputInfo.GetQuantizationScale() == m_OutputTensorInfos[i].GetQuantizationScale());
113  BOOST_TEST(outputInfo.GetQuantizationOffset() == m_OutputTensorInfos[i].GetQuantizationOffset());
114  }
115  }
116 
117  void VerifyConstTensors(const std::string& tensorName,
118  const armnn::ConstTensor* expectedPtr,
119  const armnn::ConstTensor* actualPtr)
120  {
121  if (expectedPtr == nullptr)
122  {
123  BOOST_CHECK_MESSAGE(actualPtr == nullptr, tensorName + " should not exist");
124  }
125  else
126  {
127  BOOST_CHECK_MESSAGE(actualPtr != nullptr, tensorName + " should have been set");
128  if (actualPtr != nullptr)
129  {
130  const armnn::TensorInfo& expectedInfo = expectedPtr->GetInfo();
131  const armnn::TensorInfo& actualInfo = actualPtr->GetInfo();
132 
133  BOOST_CHECK_MESSAGE(expectedInfo.GetShape() == actualInfo.GetShape(),
134  tensorName + " shapes don't match");
135  BOOST_CHECK_MESSAGE(
136  GetDataTypeName(expectedInfo.GetDataType()) == GetDataTypeName(actualInfo.GetDataType()),
137  tensorName + " data types don't match");
138 
139  BOOST_CHECK_MESSAGE(expectedPtr->GetNumBytes() == actualPtr->GetNumBytes(),
140  tensorName + " (GetNumBytes) data sizes do not match");
141  if (expectedPtr->GetNumBytes() == actualPtr->GetNumBytes())
142  {
143  //check the data is identical
144  const char* expectedData = static_cast<const char*>(expectedPtr->GetMemoryArea());
145  const char* actualData = static_cast<const char*>(actualPtr->GetMemoryArea());
146  bool same = true;
147  for (unsigned int i = 0; i < expectedPtr->GetNumBytes(); ++i)
148  {
149  same = expectedData[i] == actualData[i];
150  if (!same)
151  {
152  break;
153  }
154  }
155  BOOST_CHECK_MESSAGE(same, tensorName + " data does not match");
156  }
157  }
158  }
159  }
160 
161 private:
162  std::string m_LayerName;
163  std::vector<armnn::TensorInfo> m_InputTensorInfos;
164  std::vector<armnn::TensorInfo> m_OutputTensorInfos;
165 };
166 
167 template<typename Descriptor>
168 class LayerVerifierBaseWithDescriptor : public LayerVerifierBase
169 {
170 public:
171  LayerVerifierBaseWithDescriptor(const std::string& layerName,
172  const std::vector<armnn::TensorInfo>& inputInfos,
173  const std::vector<armnn::TensorInfo>& outputInfos,
174  const Descriptor& descriptor)
175  : LayerVerifierBase(layerName, inputInfos, outputInfos)
176  , m_Descriptor(descriptor) {}
177 
178 protected:
179  void VerifyDescriptor(const Descriptor& descriptor)
180  {
181  BOOST_CHECK(descriptor == m_Descriptor);
182  }
183 
184  Descriptor m_Descriptor;
185 };
186 
187 template<typename T>
188 void CompareConstTensorData(const void* data1, const void* data2, unsigned int numElements)
189 {
190  T typedData1 = static_cast<T>(data1);
191  T typedData2 = static_cast<T>(data2);
192  BOOST_CHECK(typedData1);
193  BOOST_CHECK(typedData2);
194 
195  for (unsigned int i = 0; i < numElements; i++)
196  {
197  BOOST_TEST(typedData1[i] == typedData2[i]);
198  }
199 }
200 
201 void CompareConstTensor(const armnn::ConstTensor& tensor1, const armnn::ConstTensor& tensor2)
202 {
203  BOOST_TEST(tensor1.GetShape() == tensor2.GetShape());
204  BOOST_TEST(GetDataTypeName(tensor1.GetDataType()) == GetDataTypeName(tensor2.GetDataType()));
205 
206  switch (tensor1.GetDataType())
207  {
208  case armnn::DataType::Float32:
209  CompareConstTensorData<const float*>(
210  tensor1.GetMemoryArea(), tensor2.GetMemoryArea(), tensor1.GetNumElements());
211  break;
212  case armnn::DataType::QAsymmU8:
213  case armnn::DataType::Boolean:
214  CompareConstTensorData<const uint8_t*>(
215  tensor1.GetMemoryArea(), tensor2.GetMemoryArea(), tensor1.GetNumElements());
216  break;
217  case armnn::DataType::Signed32:
218  CompareConstTensorData<const int32_t*>(
219  tensor1.GetMemoryArea(), tensor2.GetMemoryArea(), tensor1.GetNumElements());
220  break;
221  default:
222  // Note that Float16 is not yet implemented
223  BOOST_TEST_MESSAGE("Unexpected datatype");
224  BOOST_TEST(false);
225  }
226 }
227 
228 armnn::INetworkPtr DeserializeNetwork(const std::string& serializerString)
229 {
230  std::vector<std::uint8_t> const serializerVector{serializerString.begin(), serializerString.end()};
231  return IDeserializer::Create()->CreateNetworkFromBinary(serializerVector);
232 }
233 
234 std::string SerializeNetwork(const armnn::INetwork& network)
235 {
236  armnnSerializer::Serializer serializer;
237  serializer.Serialize(network);
238 
239  std::stringstream stream;
240  serializer.SaveSerializedToStream(stream);
241 
242  std::string serializerString{stream.str()};
243  return serializerString;
244 }
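// The test cases below all follow the same round-trip pattern: build an INetwork,
// serialize it with SerializeNetwork, rebuild it with DeserializeNetwork, and then
// Accept() a verifier on the deserialized network to confirm the layer survived intact.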
245 
246 template<typename DataType>
247 static std::vector<DataType> GenerateRandomData(size_t size)
248 {
249  constexpr bool isIntegerType = std::is_integral<DataType>::value;
250  using Distribution =
251  typename std::conditional<isIntegerType,
252  std::uniform_int_distribution<DataType>,
253  std::uniform_real_distribution<DataType>>::type;
254 
255  static constexpr DataType lowerLimit = std::numeric_limits<DataType>::min();
256  static constexpr DataType upperLimit = std::numeric_limits<DataType>::max();
257 
258  static Distribution distribution(lowerLimit, upperLimit);
259  static std::default_random_engine generator;
260 
261  std::vector<DataType> randomData(size);
262  std::generate(randomData.begin(), randomData.end(), []() { return distribution(generator); });
263 
264  return randomData;
265 }
266 
267 } // anonymous namespace
268 
269 BOOST_AUTO_TEST_SUITE(SerializerTests)
270 
271 BOOST_AUTO_TEST_CASE(SerializeAddition)
272 {
273  DECLARE_LAYER_VERIFIER_CLASS(Addition)
274 
275  const std::string layerName("addition");
276  const armnn::TensorInfo tensorInfo({1, 2, 3}, armnn::DataType::Float32);
277 
278  armnn::INetworkPtr network = armnn::INetwork::Create();
279  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
280  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
281  armnn::IConnectableLayer* const additionLayer = network->AddAdditionLayer(layerName.c_str());
282  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
283 
284  inputLayer0->GetOutputSlot(0).Connect(additionLayer->GetInputSlot(0));
285  inputLayer1->GetOutputSlot(0).Connect(additionLayer->GetInputSlot(1));
286  additionLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
287 
288  inputLayer0->GetOutputSlot(0).SetTensorInfo(tensorInfo);
289  inputLayer1->GetOutputSlot(0).SetTensorInfo(tensorInfo);
290  additionLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
291 
292  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
293  BOOST_CHECK(deserializedNetwork);
294 
295  AdditionLayerVerifier verifier(layerName, {tensorInfo, tensorInfo}, {tensorInfo});
296  deserializedNetwork->Accept(verifier);
297 }
298 
299 BOOST_AUTO_TEST_CASE(SerializeArgMinMax)
300 {
301  DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(ArgMinMax)
302 
303  const std::string layerName("argminmax");
304  const armnn::TensorInfo inputInfo({1, 2, 3}, armnn::DataType::Float32);
305  const armnn::TensorInfo outputInfo({1, 3}, armnn::DataType::Signed32);
306 
307  armnn::ArgMinMaxDescriptor descriptor;
309  descriptor.m_Axis = 1;
310 
311  armnn::INetworkPtr network = armnn::INetwork::Create();
312  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
313  armnn::IConnectableLayer* const argMinMaxLayer = network->AddArgMinMaxLayer(descriptor, layerName.c_str());
314  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
315 
316  inputLayer->GetOutputSlot(0).Connect(argMinMaxLayer->GetInputSlot(0));
317  argMinMaxLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
318 
319  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
320  argMinMaxLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
321 
322  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
323  BOOST_CHECK(deserializedNetwork);
324 
325  ArgMinMaxLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, descriptor);
326  deserializedNetwork->Accept(verifier);
327 }
328 
329 BOOST_AUTO_TEST_CASE(SerializeBatchNormalization)
330 {
331  using Descriptor = armnn::BatchNormalizationDescriptor;
332  class BatchNormalizationLayerVerifier : public LayerVerifierBaseWithDescriptor<Descriptor>
333  {
334  public:
335  BatchNormalizationLayerVerifier(const std::string& layerName,
336  const std::vector<armnn::TensorInfo>& inputInfos,
337  const std::vector<armnn::TensorInfo>& outputInfos,
338  const Descriptor& descriptor,
339  const armnn::ConstTensor& mean,
340  const armnn::ConstTensor& variance,
341  const armnn::ConstTensor& beta,
342  const armnn::ConstTensor& gamma)
343  : LayerVerifierBaseWithDescriptor<Descriptor>(layerName, inputInfos, outputInfos, descriptor)
344  , m_Mean(mean)
345  , m_Variance(variance)
346  , m_Beta(beta)
347  , m_Gamma(gamma) {}
348 
349  void VisitBatchNormalizationLayer(const armnn::IConnectableLayer* layer,
350  const Descriptor& descriptor,
351  const armnn::ConstTensor& mean,
352  const armnn::ConstTensor& variance,
353  const armnn::ConstTensor& beta,
354  const armnn::ConstTensor& gamma,
355  const char* name) override
356  {
357  VerifyNameAndConnections(layer, name);
358  VerifyDescriptor(descriptor);
359 
360  CompareConstTensor(mean, m_Mean);
361  CompareConstTensor(variance, m_Variance);
362  CompareConstTensor(beta, m_Beta);
363  CompareConstTensor(gamma, m_Gamma);
364  }
365 
366  private:
367  armnn::ConstTensor m_Mean;
368  armnn::ConstTensor m_Variance;
369  armnn::ConstTensor m_Beta;
370  armnn::ConstTensor m_Gamma;
371  };
372 
373  const std::string layerName("batchNormalization");
374  const armnn::TensorInfo inputInfo ({ 1, 3, 3, 1 }, armnn::DataType::Float32);
375  const armnn::TensorInfo outputInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32);
376 
377  const armnn::TensorInfo meanInfo({1}, armnn::DataType::Float32);
378  const armnn::TensorInfo varianceInfo({1}, armnn::DataType::Float32);
379  const armnn::TensorInfo betaInfo({1}, armnn::DataType::Float32);
380  const armnn::TensorInfo gammaInfo({1}, armnn::DataType::Float32);
381 
381 
382  armnn::BatchNormalizationDescriptor descriptor;
383  descriptor.m_Eps = 0.0010000000475f;
384  descriptor.m_DataLayout = armnn::DataLayout::NHWC;
385 
386  std::vector<float> meanData({5.0});
387  std::vector<float> varianceData({2.0});
388  std::vector<float> betaData({1.0});
389  std::vector<float> gammaData({0.0});
390 
391  armnn::ConstTensor mean(meanInfo, meanData);
392  armnn::ConstTensor variance(varianceInfo, varianceData);
393  armnn::ConstTensor beta(betaInfo, betaData);
394  armnn::ConstTensor gamma(gammaInfo, gammaData);
395 
396  armnn::INetworkPtr network = armnn::INetwork::Create();
397  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
398  armnn::IConnectableLayer* const batchNormalizationLayer =
399  network->AddBatchNormalizationLayer(descriptor, mean, variance, beta, gamma, layerName.c_str());
400  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
401 
402  inputLayer->GetOutputSlot(0).Connect(batchNormalizationLayer->GetInputSlot(0));
403  batchNormalizationLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
404 
405  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
406  batchNormalizationLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
407 
408  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
409  BOOST_CHECK(deserializedNetwork);
410 
411  BatchNormalizationLayerVerifier verifier(
412  layerName, {inputInfo}, {outputInfo}, descriptor, mean, variance, beta, gamma);
413  deserializedNetwork->Accept(verifier);
414 }
415 
416 BOOST_AUTO_TEST_CASE(SerializeBatchToSpaceNd)
417 {
418  DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(BatchToSpaceNd)
419 
420  const std::string layerName("spaceToBatchNd");
421  const armnn::TensorInfo inputInfo({4, 1, 2, 2}, armnn::DataType::Float32);
422  const armnn::TensorInfo outputInfo({1, 1, 4, 4}, armnn::DataType::Float32);
423 
424  armnn::BatchToSpaceNdDescriptor desc;
425  desc.m_DataLayout = armnn::DataLayout::NCHW;
426  desc.m_BlockShape = {2, 2};
427  desc.m_Crops = {{0, 0}, {0, 0}};
428 
429  armnn::INetworkPtr network = armnn::INetwork::Create();
430  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
431  armnn::IConnectableLayer* const batchToSpaceNdLayer = network->AddBatchToSpaceNdLayer(desc, layerName.c_str());
432  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
433 
434  inputLayer->GetOutputSlot(0).Connect(batchToSpaceNdLayer->GetInputSlot(0));
435  batchToSpaceNdLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
436 
437  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
438  batchToSpaceNdLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
439 
440  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
441  BOOST_CHECK(deserializedNetwork);
442 
443  BatchToSpaceNdLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, desc);
444  deserializedNetwork->Accept(verifier);
445 }
446 
447 BOOST_AUTO_TEST_CASE(SerializeComparison)
448 {
449  DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Comparison)
450 
451  const std::string layerName("comparison");
452 
453  const armnn::TensorShape shape{2, 1, 2, 4};
454 
455  const armnn::TensorInfo inputInfo (shape, armnn::DataType::Float32);
456  const armnn::TensorInfo outputInfo(shape, armnn::DataType::Boolean);
457 
458  armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::NotEqual);
459 
460  armnn::INetworkPtr network = armnn::INetwork::Create();
461  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
462  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
463  armnn::IConnectableLayer* const comparisonLayer = network->AddComparisonLayer(descriptor, layerName.c_str());
464  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
465 
466  inputLayer0->GetOutputSlot(0).Connect(comparisonLayer->GetInputSlot(0));
467  inputLayer1->GetOutputSlot(0).Connect(comparisonLayer->GetInputSlot(1));
468  comparisonLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
469 
470  inputLayer0->GetOutputSlot(0).SetTensorInfo(inputInfo);
471  inputLayer1->GetOutputSlot(0).SetTensorInfo(inputInfo);
472  comparisonLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
473 
474  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
475  BOOST_CHECK(deserializedNetwork);
476 
477  ComparisonLayerVerifier verifier(layerName, { inputInfo, inputInfo }, { outputInfo }, descriptor);
478  deserializedNetwork->Accept(verifier);
479 }
480 
481 BOOST_AUTO_TEST_CASE(SerializeConstant)
482 {
483  class ConstantLayerVerifier : public LayerVerifierBase
484  {
485  public:
486  ConstantLayerVerifier(const std::string& layerName,
487  const std::vector<armnn::TensorInfo>& inputInfos,
488  const std::vector<armnn::TensorInfo>& outputInfos,
489  const armnn::ConstTensor& layerInput)
490  : LayerVerifierBase(layerName, inputInfos, outputInfos)
491  , m_LayerInput(layerInput) {}
492 
493  void VisitConstantLayer(const armnn::IConnectableLayer* layer,
494  const armnn::ConstTensor& input,
495  const char* name) override
496  {
497  VerifyNameAndConnections(layer, name);
498  CompareConstTensor(input, m_LayerInput);
499  }
500 
501  void VisitAdditionLayer(const armnn::IConnectableLayer*, const char*) override {}
502 
503  private:
504  armnn::ConstTensor m_LayerInput;
505  };
506 
507  const std::string layerName("constant");
508  const armnn::TensorInfo info({ 2, 3 }, armnn::DataType::Float32);
509 
510  std::vector<float> constantData = GenerateRandomData<float>(info.GetNumElements());
511  armnn::ConstTensor constTensor(info, constantData);
512 
513  armnn::INetworkPtr network = armnn::INetwork::Create();
514  armnn::IConnectableLayer* input = network->AddInputLayer(0);
515  armnn::IConnectableLayer* constant = network->AddConstantLayer(constTensor, layerName.c_str());
516  armnn::IConnectableLayer* add = network->AddAdditionLayer();
517  armnn::IConnectableLayer* output = network->AddOutputLayer(0);
518 
519  input->GetOutputSlot(0).Connect(add->GetInputSlot(0));
520  constant->GetOutputSlot(0).Connect(add->GetInputSlot(1));
521  add->GetOutputSlot(0).Connect(output->GetInputSlot(0));
522 
523  input->GetOutputSlot(0).SetTensorInfo(info);
524  constant->GetOutputSlot(0).SetTensorInfo(info);
525  add->GetOutputSlot(0).SetTensorInfo(info);
526 
527  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
528  BOOST_CHECK(deserializedNetwork);
529 
530  ConstantLayerVerifier verifier(layerName, {}, {info}, constTensor);
531  deserializedNetwork->Accept(verifier);
532 }
533 
534 BOOST_AUTO_TEST_CASE(SerializeConvolution2d)
535 {
536  using Descriptor = armnn::Convolution2dDescriptor;
537  class Convolution2dLayerVerifier : public LayerVerifierBaseWithDescriptor<Descriptor>
538  {
539  public:
540  Convolution2dLayerVerifier(const std::string& layerName,
541  const std::vector<armnn::TensorInfo>& inputInfos,
542  const std::vector<armnn::TensorInfo>& outputInfos,
543  const Descriptor& descriptor,
544  const armnn::ConstTensor& weights,
545  const armnn::Optional<armnn::ConstTensor>& biases)
546  : LayerVerifierBaseWithDescriptor<Descriptor>(layerName, inputInfos, outputInfos, descriptor)
547  , m_Weights(weights)
548  , m_Biases(biases) {}
549 
550  void VisitConvolution2dLayer(const armnn::IConnectableLayer* layer,
551  const Descriptor& descriptor,
552  const armnn::ConstTensor& weights,
553  const armnn::Optional<armnn::ConstTensor>& biases,
554  const char* name) override
555  {
556  VerifyNameAndConnections(layer, name);
557  VerifyDescriptor(descriptor);
558 
559  // check weights
560  CompareConstTensor(weights, m_Weights);
561 
562  // check biases
563  BOOST_CHECK(biases.has_value() == descriptor.m_BiasEnabled);
564  BOOST_CHECK(biases.has_value() == m_Biases.has_value());
565 
566  if (biases.has_value() && m_Biases.has_value())
567  {
568  CompareConstTensor(biases.value(), m_Biases.value());
569  }
570  }
571 
572  private:
573  armnn::ConstTensor m_Weights;
574  armnn::Optional<armnn::ConstTensor> m_Biases;
575  };
576 
577  const std::string layerName("convolution2d");
578  const armnn::TensorInfo inputInfo ({ 1, 5, 5, 1 }, armnn::DataType::Float32);
579  const armnn::TensorInfo outputInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32);
580 
581  const armnn::TensorInfo weightsInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32);
582  const armnn::TensorInfo biasesInfo ({ 1 }, armnn::DataType::Float32);
583 
584  std::vector<float> weightsData = GenerateRandomData<float>(weightsInfo.GetNumElements());
585  armnn::ConstTensor weights(weightsInfo, weightsData);
586 
587  std::vector<float> biasesData = GenerateRandomData<float>(biasesInfo.GetNumElements());
588  armnn::ConstTensor biases(biasesInfo, biasesData);
589 
590  armnn::Convolution2dDescriptor descriptor;
591  descriptor.m_PadLeft = 1;
592  descriptor.m_PadRight = 1;
593  descriptor.m_PadTop = 1;
594  descriptor.m_PadBottom = 1;
595  descriptor.m_StrideX = 2;
596  descriptor.m_StrideY = 2;
597  descriptor.m_DilationX = 2;
598  descriptor.m_DilationY = 2;
599  descriptor.m_BiasEnabled = true;
600  descriptor.m_DataLayout = armnn::DataLayout::NHWC;
601 
602  armnn::INetworkPtr network = armnn::INetwork::Create();
603  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
604  armnn::IConnectableLayer* const convLayer =
605  network->AddConvolution2dLayer(descriptor,
606  weights,
607  armnn::Optional<armnn::ConstTensor>(biases),
608  layerName.c_str());
609  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
610 
611  inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
612  convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
613 
614  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
615  convLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
616 
617  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
618  BOOST_CHECK(deserializedNetwork);
619 
620  Convolution2dLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, descriptor, weights, biases);
621  deserializedNetwork->Accept(verifier);
622 }
623 
624 BOOST_AUTO_TEST_CASE(SerializeDepthToSpace)
625 {
626  DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(DepthToSpace)
627 
628  const std::string layerName("depthToSpace");
629 
630  const armnn::TensorInfo inputInfo ({ 1, 8, 4, 12 }, armnn::DataType::Float32);
631  const armnn::TensorInfo outputInfo({ 1, 16, 8, 3 }, armnn::DataType::Float32);
632 
633  armnn::DepthToSpaceDescriptor desc;
634  desc.m_BlockSize = 2;
635  desc.m_DataLayout = armnn::DataLayout::NHWC;
636 
637  armnn::INetworkPtr network = armnn::INetwork::Create();
638  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
639  armnn::IConnectableLayer* const depthToSpaceLayer = network->AddDepthToSpaceLayer(desc, layerName.c_str());
640  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
641 
642  inputLayer->GetOutputSlot(0).Connect(depthToSpaceLayer->GetInputSlot(0));
643  depthToSpaceLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
644 
645  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
646  depthToSpaceLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
647 
648  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
649  BOOST_CHECK(deserializedNetwork);
650 
651  DepthToSpaceLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, desc);
652  deserializedNetwork->Accept(verifier);
653 }
654 
655 BOOST_AUTO_TEST_CASE(SerializeDepthwiseConvolution2d)
656 {
657  using Descriptor = armnn::DepthwiseConvolution2dDescriptor;
658  class DepthwiseConvolution2dLayerVerifier : public LayerVerifierBaseWithDescriptor<Descriptor>
659  {
660  public:
661  DepthwiseConvolution2dLayerVerifier(const std::string& layerName,
662  const std::vector<armnn::TensorInfo>& inputInfos,
663  const std::vector<armnn::TensorInfo>& outputInfos,
664  const Descriptor& descriptor,
665  const armnn::ConstTensor& weights,
666  const armnn::Optional<armnn::ConstTensor>& biases) :
667  LayerVerifierBaseWithDescriptor<Descriptor>(layerName, inputInfos, outputInfos, descriptor),
668  m_Weights(weights),
669  m_Biases(biases) {}
670 
671  void VisitDepthwiseConvolution2dLayer(const armnn::IConnectableLayer* layer,
672  const Descriptor& descriptor,
673  const armnn::ConstTensor& weights,
674  const armnn::Optional<armnn::ConstTensor>& biases,
675  const char* name) override
676  {
677  VerifyNameAndConnections(layer, name);
678  VerifyDescriptor(descriptor);
679 
680  // check weights
681  CompareConstTensor(weights, m_Weights);
682 
683  // check biases
684  BOOST_CHECK(biases.has_value() == descriptor.m_BiasEnabled);
685  BOOST_CHECK(biases.has_value() == m_Biases.has_value());
686 
687  if (biases.has_value() && m_Biases.has_value())
688  {
689  CompareConstTensor(biases.value(), m_Biases.value());
690  }
691  }
692 
693  private:
694  armnn::ConstTensor m_Weights;
695  armnn::Optional<armnn::ConstTensor> m_Biases;
696  };
697 
698  const std::string layerName("depwiseConvolution2d");
699  const armnn::TensorInfo inputInfo ({ 1, 5, 5, 3 }, armnn::DataType::Float32);
700  const armnn::TensorInfo outputInfo({ 1, 3, 3, 3 }, armnn::DataType::Float32);
701 
702  const armnn::TensorInfo weightsInfo({ 1, 3, 3, 3 }, armnn::DataType::Float32);
703  const armnn::TensorInfo biasesInfo ({ 3 }, armnn::DataType::Float32);
704 
705  std::vector<float> weightsData = GenerateRandomData<float>(weightsInfo.GetNumElements());
706  armnn::ConstTensor weights(weightsInfo, weightsData);
707 
708  std::vector<int32_t> biasesData = GenerateRandomData<int32_t>(biasesInfo.GetNumElements());
709  armnn::ConstTensor biases(biasesInfo, biasesData);
710 
711  armnn::DepthwiseConvolution2dDescriptor descriptor;
712  descriptor.m_PadLeft = 1;
713  descriptor.m_PadRight = 1;
714  descriptor.m_PadTop = 1;
715  descriptor.m_PadBottom = 1;
716  descriptor.m_StrideX = 2;
717  descriptor.m_StrideY = 2;
718  descriptor.m_DilationX = 2;
719  descriptor.m_DilationY = 2;
720  descriptor.m_BiasEnabled = true;
721  descriptor.m_DataLayout = armnn::DataLayout::NHWC;
722 
723  armnn::INetworkPtr network = armnn::INetwork::Create();
724  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
725  armnn::IConnectableLayer* const depthwiseConvLayer =
726  network->AddDepthwiseConvolution2dLayer(descriptor,
727  weights,
728  armnn::Optional<armnn::ConstTensor>(biases),
729  layerName.c_str());
730  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
731 
732  inputLayer->GetOutputSlot(0).Connect(depthwiseConvLayer->GetInputSlot(0));
733  depthwiseConvLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
734 
735  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
736  depthwiseConvLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
737 
738  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
739  BOOST_CHECK(deserializedNetwork);
740 
741  DepthwiseConvolution2dLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, descriptor, weights, biases);
742  deserializedNetwork->Accept(verifier);
743 }
744 
745 BOOST_AUTO_TEST_CASE(SerializeDequantize)
746 {
747  DECLARE_LAYER_VERIFIER_CLASS(Dequantize)
748 
749  const std::string layerName("dequantize");
750  const armnn::TensorInfo inputInfo({ 1, 5, 2, 3 }, armnn::DataType::QAsymmU8, 0.5f, 1);
751  const armnn::TensorInfo outputInfo({ 1, 5, 2, 3 }, armnn::DataType::Float32);
752 
753  armnn::INetworkPtr network = armnn::INetwork::Create();
754  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
755  armnn::IConnectableLayer* const dequantizeLayer = network->AddDequantizeLayer(layerName.c_str());
756  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
757 
758  inputLayer->GetOutputSlot(0).Connect(dequantizeLayer->GetInputSlot(0));
759  dequantizeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
760 
761  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
762  dequantizeLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
763 
764  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
765  BOOST_CHECK(deserializedNetwork);
766 
767  DequantizeLayerVerifier verifier(layerName, {inputInfo}, {outputInfo});
768  deserializedNetwork->Accept(verifier);
769 }
770 
771 BOOST_AUTO_TEST_CASE(SerializeDeserializeDetectionPostProcess)
772 {
773  using Descriptor = armnn::DetectionPostProcessDescriptor;
774  class DetectionPostProcessLayerVerifier : public LayerVerifierBaseWithDescriptor<Descriptor>
775  {
776  public:
777  DetectionPostProcessLayerVerifier(const std::string& layerName,
778  const std::vector<armnn::TensorInfo>& inputInfos,
779  const std::vector<armnn::TensorInfo>& outputInfos,
780  const Descriptor& descriptor,
781  const armnn::ConstTensor& anchors)
782  : LayerVerifierBaseWithDescriptor<Descriptor>(layerName, inputInfos, outputInfos, descriptor)
783  , m_Anchors(anchors) {}
784 
785  void VisitDetectionPostProcessLayer(const armnn::IConnectableLayer* layer,
786  const Descriptor& descriptor,
787  const armnn::ConstTensor& anchors,
788  const char* name) override
789  {
790  VerifyNameAndConnections(layer, name);
791  VerifyDescriptor(descriptor);
792 
793  CompareConstTensor(anchors, m_Anchors);
794  }
795 
796  private:
797  armnn::ConstTensor m_Anchors;
798  };
799 
800  const std::string layerName("detectionPostProcess");
801 
802  const std::vector<armnn::TensorInfo> inputInfos({
805  });
806 
807  const std::vector<armnn::TensorInfo> outputInfos({
812  });
813 
814  armnn::DetectionPostProcessDescriptor descriptor;
815  descriptor.m_UseRegularNms = true;
816  descriptor.m_MaxDetections = 3;
817  descriptor.m_MaxClassesPerDetection = 1;
818  descriptor.m_DetectionsPerClass =1;
819  descriptor.m_NmsScoreThreshold = 0.0;
820  descriptor.m_NmsIouThreshold = 0.5;
821  descriptor.m_NumClasses = 2;
822  descriptor.m_ScaleY = 10.0;
823  descriptor.m_ScaleX = 10.0;
824  descriptor.m_ScaleH = 5.0;
825  descriptor.m_ScaleW = 5.0;
826 
827  const armnn::TensorInfo anchorsInfo({ 6, 4 }, armnn::DataType::Float32);
828  const std::vector<float> anchorsData({
829  0.5f, 0.5f, 1.0f, 1.0f,
830  0.5f, 0.5f, 1.0f, 1.0f,
831  0.5f, 0.5f, 1.0f, 1.0f,
832  0.5f, 10.5f, 1.0f, 1.0f,
833  0.5f, 10.5f, 1.0f, 1.0f,
834  0.5f, 100.5f, 1.0f, 1.0f
835  });
836  armnn::ConstTensor anchors(anchorsInfo, anchorsData);
837 
838  armnn::INetworkPtr network = armnn::INetwork::Create();
839  armnn::IConnectableLayer* const detectionLayer =
840  network->AddDetectionPostProcessLayer(descriptor, anchors, layerName.c_str());
841 
842  for (unsigned int i = 0; i < 2; i++)
843  {
844  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(static_cast<int>(i));
845  inputLayer->GetOutputSlot(0).Connect(detectionLayer->GetInputSlot(i));
846  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfos[i]);
847  }
848 
849  for (unsigned int i = 0; i < 4; i++)
850  {
851  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(static_cast<int>(i));
852  detectionLayer->GetOutputSlot(i).Connect(outputLayer->GetInputSlot(0));
853  detectionLayer->GetOutputSlot(i).SetTensorInfo(outputInfos[i]);
854  }
855 
856  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
857  BOOST_CHECK(deserializedNetwork);
858 
859  DetectionPostProcessLayerVerifier verifier(layerName, inputInfos, outputInfos, descriptor, anchors);
860  deserializedNetwork->Accept(verifier);
861 }
862 
863 BOOST_AUTO_TEST_CASE(SerializeDivision)
864 {
865  DECLARE_LAYER_VERIFIER_CLASS(Division)
866 
867  const std::string layerName("division");
868  const armnn::TensorInfo info({ 1, 5, 2, 3 }, armnn::DataType::Float32);
869 
870  armnn::INetworkPtr network = armnn::INetwork::Create();
871  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
872  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
873  armnn::IConnectableLayer* const divisionLayer = network->AddDivisionLayer(layerName.c_str());
874  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
875 
876  inputLayer0->GetOutputSlot(0).Connect(divisionLayer->GetInputSlot(0));
877  inputLayer1->GetOutputSlot(0).Connect(divisionLayer->GetInputSlot(1));
878  divisionLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
879 
880  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
881  inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
882  divisionLayer->GetOutputSlot(0).SetTensorInfo(info);
883 
884  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
885  BOOST_CHECK(deserializedNetwork);
886 
887  DivisionLayerVerifier verifier(layerName, {info, info}, {info});
888  deserializedNetwork->Accept(verifier);
889 }
890 
891 class EqualLayerVerifier : public LayerVerifierBase
892 {
893 public:
894  EqualLayerVerifier(const std::string& layerName,
895  const std::vector<armnn::TensorInfo>& inputInfos,
896  const std::vector<armnn::TensorInfo>& outputInfos)
897  : LayerVerifierBase(layerName, inputInfos, outputInfos) {}
898 
899  void VisitComparisonLayer(const armnn::IConnectableLayer* layer,
900  const armnn::ComparisonDescriptor& descriptor,
901  const char* name) override
902  {
903  VerifyNameAndConnections(layer, name);
904  BOOST_CHECK(descriptor.m_Operation == armnn::ComparisonOperation::Equal);
905  }
906 
907  void VisitEqualLayer(const armnn::IConnectableLayer*, const char*) override
908  {
909  throw armnn::Exception("EqualLayer should have translated to ComparisonLayer");
910  }
911 };
912 
913 // NOTE: Until the deprecated AddEqualLayer disappears this test checks that calling
914 // AddEqualLayer places a ComparisonLayer into the serialized format and that
915 // when this deserialises we have a ComparisonLayer
916 BOOST_AUTO_TEST_CASE(SerializeEqual)
917 {
918  const std::string layerName("equal");
919 
920  const armnn::TensorShape shape{2, 1, 2, 4};
921 
922  const armnn::TensorInfo inputInfo (shape, armnn::DataType::Float32);
923  const armnn::TensorInfo outputInfo(shape, armnn::DataType::Boolean);
924 
925  armnn::INetworkPtr network = armnn::INetwork::Create();
926  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
927  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
928  ARMNN_NO_DEPRECATE_WARN_BEGIN
929  armnn::IConnectableLayer* const equalLayer = network->AddEqualLayer(layerName.c_str());
930  ARMNN_NO_DEPRECATE_WARN_END
931  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
932 
933  inputLayer0->GetOutputSlot(0).Connect(equalLayer->GetInputSlot(0));
934  inputLayer1->GetOutputSlot(0).Connect(equalLayer->GetInputSlot(1));
935  equalLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
936 
937  inputLayer0->GetOutputSlot(0).SetTensorInfo(inputInfo);
938  inputLayer1->GetOutputSlot(0).SetTensorInfo(inputInfo);
939  equalLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
940 
941  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
942  BOOST_CHECK(deserializedNetwork);
943 
944  EqualLayerVerifier verifier(layerName, { inputInfo, inputInfo }, { outputInfo });
945  deserializedNetwork->Accept(verifier);
946 }
947 
948 BOOST_AUTO_TEST_CASE(EnsureEqualBackwardCompatibility)
949 {
950  // The hex data below is a flat buffer containing a simple network with two inputs,
951  // an EqualLayer (now deprecated) and an output
952  //
953  // This test verifies that we can still deserialize this old-style model by replacing
954  // the EqualLayer with an equivalent ComparisonLayer
955  const std::vector<uint8_t> equalModel =
956  {
957  0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00,
958  0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
959  0xCC, 0x01, 0x00, 0x00, 0x20, 0x01, 0x00, 0x00, 0x70, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x02, 0x00,
960  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
961  0x60, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B, 0x04, 0x00, 0x00, 0x00, 0xFE, 0xFE, 0xFF, 0xFF, 0x04, 0x00,
962  0x00, 0x00, 0x06, 0xFF, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0xEA, 0xFE, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00,
963  0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00,
964  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
965  0x64, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xB4, 0xFE, 0xFF, 0xFF, 0x00, 0x00,
966  0x00, 0x13, 0x04, 0x00, 0x00, 0x00, 0x52, 0xFF, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x36, 0xFF, 0xFF, 0xFF,
967  0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x11, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x1C, 0x00,
968  0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x65, 0x71, 0x75, 0x61, 0x6C, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
969  0x5C, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x34, 0xFF,
970  0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x92, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x04, 0x08, 0x00, 0x00, 0x00,
971  0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00,
972  0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x08, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00,
973  0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x00, 0x00,
974  0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0E, 0x00,
975  0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00,
976  0x06, 0x00, 0x08, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0E, 0x00,
977  0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
978  0x0E, 0x00, 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x14, 0x00, 0x0E, 0x00, 0x00, 0x00,
979  0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00,
980  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
981  0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00,
982  0x00, 0x00, 0x66, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
983  0x04, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x04, 0x00,
984  0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09,
985  0x04, 0x00, 0x00, 0x00, 0xF6, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x0A, 0x00,
986  0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x14, 0x00, 0x00, 0x00,
987  0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00,
988  0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
989  0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0A, 0x00, 0x00, 0x00,
990  0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x08, 0x00,
991  0x07, 0x00, 0x0C, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
992  0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
993  0x04, 0x00, 0x00, 0x00
994  };
995 
996  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(std::string(equalModel.begin(), equalModel.end()));
997  BOOST_CHECK(deserializedNetwork);
998 
999  const armnn::TensorShape shape{ 2, 1, 2, 4 };
1000 
1001  const armnn::TensorInfo inputInfo (shape, armnn::DataType::Float32);
1002  const armnn::TensorInfo outputInfo(shape, armnn::DataType::Boolean);
1003 
1004  EqualLayerVerifier verifier("equal", { inputInfo, inputInfo }, { outputInfo });
1005  deserializedNetwork->Accept(verifier);
1006 }
1007 
1008 BOOST_AUTO_TEST_CASE(SerializeFloor)
1009 {
1010  DECLARE_LAYER_VERIFIER_CLASS(Floor)
1011 
1012  const std::string layerName("floor");
1013  const armnn::TensorInfo info({4,4}, armnn::DataType::Float32);
1014 
1015  armnn::INetworkPtr network = armnn::INetwork::Create();
1016  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1017  armnn::IConnectableLayer* const floorLayer = network->AddFloorLayer(layerName.c_str());
1018  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1019 
1020  inputLayer->GetOutputSlot(0).Connect(floorLayer->GetInputSlot(0));
1021  floorLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1022 
1023  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
1024  floorLayer->GetOutputSlot(0).SetTensorInfo(info);
1025 
1026  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1027  BOOST_CHECK(deserializedNetwork);
1028 
1029  FloorLayerVerifier verifier(layerName, {info}, {info});
1030  deserializedNetwork->Accept(verifier);
1031 }
1032 
1033 BOOST_AUTO_TEST_CASE(SerializeFullyConnected)
1034 {
1035  using Descriptor = armnn::FullyConnectedDescriptor;
1036  class FullyConnectedLayerVerifier : public LayerVerifierBaseWithDescriptor<Descriptor>
1037  {
1038  public:
1039  FullyConnectedLayerVerifier(const std::string& layerName,
1040  const std::vector<armnn::TensorInfo>& inputInfos,
1041  const std::vector<armnn::TensorInfo>& outputInfos,
1042  const Descriptor& descriptor,
1043  const armnn::ConstTensor& weight,
1044  const armnn::Optional<armnn::ConstTensor>& bias)
1045  : LayerVerifierBaseWithDescriptor<Descriptor>(layerName, inputInfos, outputInfos, descriptor)
1046  , m_Weight(weight)
1047  , m_Bias(bias) {}
1048 
1049  void VisitFullyConnectedLayer(const armnn::IConnectableLayer* layer,
1050  const Descriptor& descriptor,
1051  const armnn::ConstTensor& weight,
1052  const armnn::Optional<armnn::ConstTensor>& bias,
1053  const char* name) override
1054  {
1055  VerifyNameAndConnections(layer, name);
1056  VerifyDescriptor(descriptor);
1057 
1058  CompareConstTensor(weight, m_Weight);
1059 
1060  BOOST_TEST(bias.has_value() == descriptor.m_BiasEnabled);
1061  BOOST_TEST(bias.has_value() == m_Bias.has_value());
1062 
1063  if (bias.has_value() && m_Bias.has_value())
1064  {
1065  CompareConstTensor(bias.value(), m_Bias.value());
1066  }
1067  }
1068 
1069  private:
1070  armnn::ConstTensor m_Weight;
1071  armnn::Optional<armnn::ConstTensor> m_Bias;
1072  };
1073 
1074  const std::string layerName("fullyConnected");
1075  const armnn::TensorInfo inputInfo ({ 2, 5, 1, 1 }, armnn::DataType::Float32);
1076  const armnn::TensorInfo outputInfo({ 2, 3 }, armnn::DataType::Float32);
1077 
1078  const armnn::TensorInfo weightsInfo({ 5, 3 }, armnn::DataType::Float32);
1079  const armnn::TensorInfo biasesInfo ({ 3 }, armnn::DataType::Float32);
1080  std::vector<float> weightsData = GenerateRandomData<float>(weightsInfo.GetNumElements());
1081  std::vector<float> biasesData = GenerateRandomData<float>(biasesInfo.GetNumElements());
1082  armnn::ConstTensor weights(weightsInfo, weightsData);
1083  armnn::ConstTensor biases(biasesInfo, biasesData);
1084 
1085  armnn::FullyConnectedDescriptor descriptor;
1086  descriptor.m_BiasEnabled = true;
1087  descriptor.m_TransposeWeightMatrix = false;
1088 
1089  armnn::INetworkPtr network = armnn::INetwork::Create();
1090  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1091  armnn::IConnectableLayer* const fullyConnectedLayer =
1092  network->AddFullyConnectedLayer(descriptor,
1093  weights,
1094  armnn::Optional<armnn::ConstTensor>(biases),
1095  layerName.c_str());
1096  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1097 
1098  inputLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(0));
1099  fullyConnectedLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1100 
1101  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
1102  fullyConnectedLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1103 
1104  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1105  BOOST_CHECK(deserializedNetwork);
1106 
1107  FullyConnectedLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, descriptor, weights, biases);
1108  deserializedNetwork->Accept(verifier);
1109 }
1110 
1111 BOOST_AUTO_TEST_CASE(SerializeGather)
1112 {
1113  class GatherLayerVerifier : public LayerVerifierBase
1114  {
1115  public:
1116  GatherLayerVerifier(const std::string& layerName,
1117  const std::vector<armnn::TensorInfo>& inputInfos,
1118  const std::vector<armnn::TensorInfo>& outputInfos)
1119  : LayerVerifierBase(layerName, inputInfos, outputInfos) {}
1120 
1121  void VisitGatherLayer(const armnn::IConnectableLayer* layer, const char *name) override
1122  {
1123  VerifyNameAndConnections(layer, name);
1124  }
1125 
1126  void VisitConstantLayer(const armnn::IConnectableLayer*,
1127  const armnn::ConstTensor&,
1128  const char*) override {}
1129  };
1130 
1131  const std::string layerName("gather");
1132  armnn::TensorInfo paramsInfo({ 8 }, armnn::DataType::QAsymmU8);
1133  armnn::TensorInfo outputInfo({ 3 }, armnn::DataType::QAsymmU8);
1134  const armnn::TensorInfo indicesInfo({ 3 }, armnn::DataType::Signed32);
1135 
1136  paramsInfo.SetQuantizationScale(1.0f);
1137  paramsInfo.SetQuantizationOffset(0);
1138  outputInfo.SetQuantizationScale(1.0f);
1139  outputInfo.SetQuantizationOffset(0);
1140 
1141  const std::vector<int32_t>& indicesData = {7, 6, 5};
1142 
1143  armnn::INetworkPtr network = armnn::INetwork::Create();
1144  armnn::IConnectableLayer *const inputLayer = network->AddInputLayer(0);
1145  armnn::IConnectableLayer *const constantLayer =
1146  network->AddConstantLayer(armnn::ConstTensor(indicesInfo, indicesData));
1147  armnn::IConnectableLayer *const gatherLayer = network->AddGatherLayer(layerName.c_str());
1148  armnn::IConnectableLayer *const outputLayer = network->AddOutputLayer(0);
1149 
1150  inputLayer->GetOutputSlot(0).Connect(gatherLayer->GetInputSlot(0));
1151  constantLayer->GetOutputSlot(0).Connect(gatherLayer->GetInputSlot(1));
1152  gatherLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1153 
1154  inputLayer->GetOutputSlot(0).SetTensorInfo(paramsInfo);
1155  constantLayer->GetOutputSlot(0).SetTensorInfo(indicesInfo);
1156  gatherLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1157 
1158  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1159  BOOST_CHECK(deserializedNetwork);
1160 
1161  GatherLayerVerifier verifier(layerName, {paramsInfo, indicesInfo}, {outputInfo});
1162  deserializedNetwork->Accept(verifier);
1163 }
1164 
1165 class GreaterLayerVerifier : public LayerVerifierBase
1166 {
1167 public:
1168  GreaterLayerVerifier(const std::string& layerName,
1169  const std::vector<armnn::TensorInfo>& inputInfos,
1170  const std::vector<armnn::TensorInfo>& outputInfos)
1171  : LayerVerifierBase(layerName, inputInfos, outputInfos) {}
1172 
1173  void VisitComparisonLayer(const armnn::IConnectableLayer* layer,
1174  const armnn::ComparisonDescriptor& descriptor,
1175  const char* name) override
1176  {
1177  VerifyNameAndConnections(layer, name);
1178  BOOST_CHECK(descriptor.m_Operation == armnn::ComparisonOperation::Greater);
1179  }
1180 
1181  void VisitGreaterLayer(const armnn::IConnectableLayer*, const char*) override
1182  {
1183  throw armnn::Exception("GreaterLayer should have translated to ComparisonLayer");
1184  }
1185 };
1186 
1187 // NOTE: Until the deprecated AddGreaterLayer disappears this test checks that calling
1188 // AddGreaterLayer places a ComparisonLayer into the serialized format and that
1189 // when this deserialises we have a ComparisonLayer
1190 BOOST_AUTO_TEST_CASE(SerializeGreater)
1191 {
1192  const std::string layerName("greater");
1193 
1194  const armnn::TensorShape shape{2, 1, 2, 4};
1195 
1196  const armnn::TensorInfo inputInfo (shape, armnn::DataType::Float32);
1197  const armnn::TensorInfo outputInfo(shape, armnn::DataType::Boolean);
1198 
1199  armnn::INetworkPtr network = armnn::INetwork::Create();
1200  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
1201  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
1202  ARMNN_NO_DEPRECATE_WARN_BEGIN
1203  armnn::IConnectableLayer* const equalLayer = network->AddGreaterLayer(layerName.c_str());
1204  ARMNN_NO_DEPRECATE_WARN_END
1205  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1206 
1207  inputLayer0->GetOutputSlot(0).Connect(equalLayer->GetInputSlot(0));
1208  inputLayer1->GetOutputSlot(0).Connect(equalLayer->GetInputSlot(1));
1209  equalLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1210 
1211  inputLayer0->GetOutputSlot(0).SetTensorInfo(inputInfo);
1212  inputLayer1->GetOutputSlot(0).SetTensorInfo(inputInfo);
1213  equalLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1214 
1215  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1216  BOOST_CHECK(deserializedNetwork);
1217 
1218  GreaterLayerVerifier verifier(layerName, { inputInfo, inputInfo }, { outputInfo });
1219  deserializedNetwork->Accept(verifier);
1220 }
1221 
1222 BOOST_AUTO_TEST_CASE(EnsureGreaterBackwardCompatibility)
1223 {
1224  // The hex data below is a flat buffer containing a simple network with two inputs,
1225  // a GreaterLayer (now deprecated) and an output
1226  //
1227  // This test verifies that we can still deserialize this old-style model by replacing
1228  // the GreaterLayer with an equivalent ComparisonLayer
1229  const std::vector<uint8_t> greaterModel =
1230  {
1231  0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00,
1232  0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
1233  0xCC, 0x01, 0x00, 0x00, 0x20, 0x01, 0x00, 0x00, 0x70, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x02, 0x00,
1234  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
1235  0x60, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B, 0x04, 0x00, 0x00, 0x00, 0xFE, 0xFE, 0xFF, 0xFF, 0x04, 0x00,
1236  0x00, 0x00, 0x06, 0xFF, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0xEA, 0xFE, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00,
1237  0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00,
1238  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1239  0x64, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xB4, 0xFE, 0xFF, 0xFF, 0x00, 0x00,
1240  0x00, 0x19, 0x04, 0x00, 0x00, 0x00, 0x52, 0xFF, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x36, 0xFF, 0xFF, 0xFF,
1241  0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x1C, 0x00,
1242  0x00, 0x00, 0x07, 0x00, 0x00, 0x00, 0x67, 0x72, 0x65, 0x61, 0x74, 0x65, 0x72, 0x00, 0x02, 0x00, 0x00, 0x00,
1243  0x5C, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x34, 0xFF,
1244  0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x92, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x04, 0x08, 0x00, 0x00, 0x00,
1245  0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00,
1246  0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x08, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00,
1247  0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x00, 0x00,
1248  0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0E, 0x00,
1249  0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00,
1250  0x06, 0x00, 0x08, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0E, 0x00,
1251  0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
1252  0x0E, 0x00, 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x14, 0x00, 0x0E, 0x00, 0x00, 0x00,
1253  0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00,
1254  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
1255  0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00,
1256  0x00, 0x00, 0x66, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1257  0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00,
1258  0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09,
1259  0x04, 0x00, 0x00, 0x00, 0xF6, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x0A, 0x00,
1260  0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x14, 0x00, 0x00, 0x00,
1261  0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00,
1262  0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1263  0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0A, 0x00, 0x00, 0x00,
1264  0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x08, 0x00,
1265  0x07, 0x00, 0x0C, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
1266  0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
1267  0x02, 0x00, 0x00, 0x00
1268  };
1269 
1270  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(std::string(greaterModel.begin(), greaterModel.end()));
1271  BOOST_CHECK(deserializedNetwork);
1272 
1273  const armnn::TensorShape shape{ 1, 2, 2, 2 };
1274 
1275  const armnn::TensorInfo inputInfo (shape, armnn::DataType::Float32);
1276  const armnn::TensorInfo outputInfo(shape, armnn::DataType::Boolean);
1277 
1278  GreaterLayerVerifier verifier("greater", { inputInfo, inputInfo }, { outputInfo });
1279  deserializedNetwork->Accept(verifier);
1280 }
1281 
1282 BOOST_AUTO_TEST_CASE(SerializeInstanceNormalization)
1283 {
1284  DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(InstanceNormalization)
1285 
1286  const std::string layerName("instanceNormalization");
1287  const armnn::TensorInfo info({ 1, 2, 1, 5 }, armnn::DataType::Float32);
1288 
1289  armnn::InstanceNormalizationDescriptor descriptor;
1290  descriptor.m_Gamma = 1.1f;
1291  descriptor.m_Beta = 0.1f;
1292  descriptor.m_Eps = 0.0001f;
1293  descriptor.m_DataLayout = armnn::DataLayout::NHWC;
1294 
1295  armnn::INetworkPtr network = armnn::INetwork::Create();
1296  armnn::IConnectableLayer* const inputLayer =
1297  armnn::IConnectableLayer* const instanceNormLayer =
1298  network->AddInstanceNormalizationLayer(descriptor, layerName.c_str());
1299  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1300 
1301  inputLayer->GetOutputSlot(0).Connect(instanceNormLayer->GetInputSlot(0));
1302  instanceNormLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1303 
1304  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
1305  instanceNormLayer->GetOutputSlot(0).SetTensorInfo(info);
1306 
1307  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1308  BOOST_CHECK(deserializedNetwork);
1309 
1310  InstanceNormalizationLayerVerifier verifier(layerName, {info}, {info}, descriptor);
1311  deserializedNetwork->Accept(verifier);
1312 }
1313 
1314 DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(L2Normalization)
1315 
1316 BOOST_AUTO_TEST_CASE(SerializeL2Normalization)
1317 {
1318  const std::string l2NormLayerName("l2Normalization");
1319  const armnn::TensorInfo info({1, 2, 1, 5}, armnn::DataType::Float32);
1320 
1321  armnn::L2NormalizationDescriptor desc;
1322  desc.m_DataLayout = armnn::DataLayout::NCHW;
1323  desc.m_Eps = 0.0001f;
1324 
1325  armnn::INetworkPtr network = armnn::INetwork::Create();
1326  armnn::IConnectableLayer* const inputLayer0 = network->AddL2NormalizationLayer == nullptr ? nullptr : network->AddInputLayer(0);
1327  armnn::IConnectableLayer* const l2NormLayer = network->AddL2NormalizationLayer(desc, l2NormLayerName.c_str());
1328  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1329 
1330  inputLayer0->GetOutputSlot(0).Connect(l2NormLayer->GetInputSlot(0));
1331  l2NormLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1332 
1333  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
1334  l2NormLayer->GetOutputSlot(0).SetTensorInfo(info);
1335 
1336  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1337  BOOST_CHECK(deserializedNetwork);
1338 
1339  L2NormalizationLayerVerifier verifier(l2NormLayerName, {info}, {info}, desc);
1340  deserializedNetwork->Accept(verifier);
1341 }
1342 
1343 BOOST_AUTO_TEST_CASE(EnsureL2NormalizationBackwardCompatibility)
1344 {
1345  // The hex data below is a flat buffer containing a simple network with one input
1346  // an L2Normalization layer and an output layer with dimensions as per the tensor infos below.
1347  //
1348  // This test verifies that we can still read back these old-style
1349  // models without the normalization epsilon value.
1350  const std::vector<uint8_t> l2NormalizationModel =
1351  {
1352  0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00,
1353  0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
1354  0x3C, 0x01, 0x00, 0x00, 0x74, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
1355  0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0xE8, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B,
1356  0x04, 0x00, 0x00, 0x00, 0xD6, 0xFE, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x08, 0x00,
1357  0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x9E, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00,
1358  0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00,
1359  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1360  0x4C, 0xFF, 0xFF, 0xFF, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x44, 0xFF, 0xFF, 0xFF, 0x00, 0x00,
1361  0x00, 0x20, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00,
1362  0x20, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x06, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00,
1363  0x0E, 0x00, 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x14, 0x00, 0x0E, 0x00, 0x00, 0x00,
1364  0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x1F, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x20, 0x00,
1365  0x00, 0x00, 0x0F, 0x00, 0x00, 0x00, 0x6C, 0x32, 0x4E, 0x6F, 0x72, 0x6D, 0x61, 0x6C, 0x69, 0x7A, 0x61, 0x74,
1366  0x69, 0x6F, 0x6E, 0x00, 0x01, 0x00, 0x00, 0x00, 0x48, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00,
1367  0x00, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
1368  0x52, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00,
1369  0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00,
1370  0x08, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1371  0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09,
1372  0x04, 0x00, 0x00, 0x00, 0xF6, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x0A, 0x00,
1373  0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x14, 0x00, 0x00, 0x00,
1374  0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00,
1375  0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1376  0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0A, 0x00, 0x00, 0x00,
1377  0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x08, 0x00,
1378  0x07, 0x00, 0x0C, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
1379  0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
1380  0x05, 0x00, 0x00, 0x00, 0x00
1381  };
1382 
1383  armnn::INetworkPtr deserializedNetwork =
1384  DeserializeNetwork(std::string(l2NormalizationModel.begin(), l2NormalizationModel.end()));
1385  BOOST_CHECK(deserializedNetwork);
1386 
1387  const std::string layerName("l2Normalization");
1388  const armnn::TensorInfo inputInfo = armnn::TensorInfo({1, 2, 1, 5}, armnn::DataType::Float32);
1389 
1390  armnn::L2NormalizationDescriptor desc;
1391  desc.m_DataLayout = armnn::DataLayout::NCHW;
1392  // Since this variable does not exist in the l2NormalizationModel dump, the default value will be loaded
1393  desc.m_Eps = 1e-12f;
1394 
1395  L2NormalizationLayerVerifier verifier(layerName, {inputInfo}, {inputInfo}, desc);
1396  deserializedNetwork->Accept(verifier);
1397 }
1398 
1399 BOOST_AUTO_TEST_CASE(SerializeLogSoftmax)
1400 {
1401  DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(LogSoftmax)
1402 
1403  const std::string layerName("log_softmax");
1404  const armnn::TensorInfo info({1, 10}, armnn::DataType::Float32);
1405 
1406  armnn::LogSoftmaxDescriptor descriptor;
1407  descriptor.m_Beta = 1.0f;
1408  descriptor.m_Axis = -1;
1409 
1410  armnn::INetworkPtr network = armnn::INetwork::Create();
1411  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1412  armnn::IConnectableLayer* const logSoftmaxLayer = network->AddLogSoftmaxLayer(descriptor, layerName.c_str());
1413  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1414 
1415  inputLayer->GetOutputSlot(0).Connect(logSoftmaxLayer->GetInputSlot(0));
1416  logSoftmaxLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1417 
1418  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
1419  logSoftmaxLayer->GetOutputSlot(0).SetTensorInfo(info);
1420 
1421  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1422  BOOST_CHECK(deserializedNetwork);
1423 
1424  LogSoftmaxLayerVerifier verifier(layerName, {info}, {info}, descriptor);
1425  deserializedNetwork->Accept(verifier);
1426 }
1427 
1428 BOOST_AUTO_TEST_CASE(SerializeMaximum)
1429 {
1430  DECLARE_LAYER_VERIFIER_CLASS(Maximum)
1431 
1432  const std::string layerName("maximum");
1433  const armnn::TensorInfo info({ 1, 2, 2, 3 }, armnn::DataType::Float32);
1434 
1435  armnn::INetworkPtr network = armnn::INetwork::Create();
1436  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
1437  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
1438  armnn::IConnectableLayer* const maximumLayer = network->AddMaximumLayer(layerName.c_str());
1439  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1440 
1441  inputLayer0->GetOutputSlot(0).Connect(maximumLayer->GetInputSlot(0));
1442  inputLayer1->GetOutputSlot(0).Connect(maximumLayer->GetInputSlot(1));
1443  maximumLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1444 
1445  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
1446  inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
1447  maximumLayer->GetOutputSlot(0).SetTensorInfo(info);
1448 
1449  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1450  BOOST_CHECK(deserializedNetwork);
1451 
1452  MaximumLayerVerifier verifier(layerName, {info, info}, {info});
1453  deserializedNetwork->Accept(verifier);
1454 }
1455 
1456 BOOST_AUTO_TEST_CASE(SerializeMean)
1457 {
1458  DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Mean)
1459 
1460  const std::string layerName("mean");
1461  const armnn::TensorInfo inputInfo({1, 1, 3, 2}, armnn::DataType::Float32);
1462  const armnn::TensorInfo outputInfo({1, 1, 1, 2}, armnn::DataType::Float32);
1463 
1464  armnn::MeanDescriptor descriptor;
1465  descriptor.m_Axis = { 2 };
1466  descriptor.m_KeepDims = true;
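 // Reducing dimension 2 (size 3) of the {1, 1, 3, 2} input with KeepDims enabled yields the {1, 1, 1, 2} output.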
1467 
1468  armnn::INetworkPtr network = armnn::INetwork::Create();
1469  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1470  armnn::IConnectableLayer* const meanLayer = network->AddMeanLayer(descriptor, layerName.c_str());
1471  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1472 
1473  inputLayer->GetOutputSlot(0).Connect(meanLayer->GetInputSlot(0));
1474  meanLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1475 
1476  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
1477  meanLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1478 
1479  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1480  BOOST_CHECK(deserializedNetwork);
1481 
1482  MeanLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, descriptor);
1483  deserializedNetwork->Accept(verifier);
1484 }
1485 
1486 BOOST_AUTO_TEST_CASE(SerializeMerge)
1487 {
1488  DECLARE_LAYER_VERIFIER_CLASS(Merge)
1489 
1490  const std::string layerName("merge");
1491  const armnn::TensorInfo info({ 1, 2, 2, 3 }, armnn::DataType::Float32);
1492 
1493  armnn::INetworkPtr network = armnn::INetwork::Create();
1494  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
1495  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
1496  armnn::IConnectableLayer* const mergeLayer = network->AddMergeLayer(layerName.c_str());
1497  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1498 
1499  inputLayer0->GetOutputSlot(0).Connect(mergeLayer->GetInputSlot(0));
1500  inputLayer1->GetOutputSlot(0).Connect(mergeLayer->GetInputSlot(1));
1501  mergeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1502 
1503  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
1504  inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
1505  mergeLayer->GetOutputSlot(0).SetTensorInfo(info);
1506 
1507  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1508  BOOST_CHECK(deserializedNetwork);
1509 
1510  MergeLayerVerifier verifier(layerName, {info, info}, {info});
1511  deserializedNetwork->Accept(verifier);
1512 }
1513 
1514 class MergerLayerVerifier : public LayerVerifierBaseWithDescriptor<armnn::OriginsDescriptor>
1515 {
1516 public:
1517  MergerLayerVerifier(const std::string& layerName,
1518  const std::vector<armnn::TensorInfo>& inputInfos,
1519  const std::vector<armnn::TensorInfo>& outputInfos,
1520  const armnn::OriginsDescriptor& descriptor)
1521  : LayerVerifierBaseWithDescriptor<armnn::OriginsDescriptor>(layerName, inputInfos, outputInfos, descriptor) {}
1522 
1523  void VisitMergerLayer(const armnn::IConnectableLayer*,
1524  const armnn::OriginsDescriptor&,
1525  const char*) override
1526  {
1527  throw armnn::Exception("MergerLayer should have translated to ConcatLayer");
1528  }
1529 
1530  void VisitConcatLayer(const armnn::IConnectableLayer* layer,
1531  const armnn::OriginsDescriptor& descriptor,
1532  const char* name) override
1533  {
1534  VerifyNameAndConnections(layer, name);
1535  VerifyDescriptor(descriptor);
1536  }
1537 };
1538 
1539 // NOTE: Until the deprecated AddMergerLayer disappears, this test checks that calling
1540 // AddMergerLayer places a ConcatLayer into the serialized format and that
1541 // when this is deserialized we get a ConcatLayer.
1542 BOOST_AUTO_TEST_CASE(SerializeMerger)
1543 {
1544  const std::string layerName("merger");
1545  const armnn::TensorInfo inputInfo = armnn::TensorInfo({2, 3, 2, 2}, armnn::DataType::Float32);
1546  const armnn::TensorInfo outputInfo = armnn::TensorInfo({4, 3, 2, 2}, armnn::DataType::Float32);
1547 
1548  const std::vector<armnn::TensorShape> shapes({inputInfo.GetShape(), inputInfo.GetShape()});
1549 
1550  armnn::OriginsDescriptor descriptor =
1551  armnn::CreateDescriptorForConcatenation(shapes.begin(), shapes.end(), 0);
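 // Concatenates the two {2, 3, 2, 2} inputs along dimension 0, giving the {4, 3, 2, 2} output.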
1552 
1553  armnn::INetworkPtr network = armnn::INetwork::Create();
1554  armnn::IConnectableLayer* const inputLayerOne = network->AddInputLayer(0);
1555  armnn::IConnectableLayer* const inputLayerTwo = network->AddInputLayer(1);
1556  ARMNN_NO_DEPRECATE_WARN_BEGIN
1557  armnn::IConnectableLayer* const mergerLayer = network->AddMergerLayer(descriptor, layerName.c_str());
1558  ARMNN_NO_DEPRECATE_WARN_END
1559  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1560 
1561  inputLayerOne->GetOutputSlot(0).Connect(mergerLayer->GetInputSlot(0));
1562  inputLayerTwo->GetOutputSlot(0).Connect(mergerLayer->GetInputSlot(1));
1563  mergerLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1564 
1565  inputLayerOne->GetOutputSlot(0).SetTensorInfo(inputInfo);
1566  inputLayerTwo->GetOutputSlot(0).SetTensorInfo(inputInfo);
1567  mergerLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1568 
1569  std::string mergerLayerNetwork = SerializeNetwork(*network);
1570  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(mergerLayerNetwork);
1571  BOOST_CHECK(deserializedNetwork);
1572 
1573  MergerLayerVerifier verifier(layerName, {inputInfo, inputInfo}, {outputInfo}, descriptor);
1574  deserializedNetwork->Accept(verifier);
1575 }
1576 
1577 BOOST_AUTO_TEST_CASE(EnsureMergerLayerBackwardCompatibility)
1578 {
1579  // The hex data below is a flat buffer containing a simple network with two inputs,
1580  // a merger layer (now deprecated) and an output layer with dimensions as per the tensor infos below.
1581  //
1582  // This test verifies that we can still read back these old style
1583  // models replacing the MergerLayers with ConcatLayers with the same parameters.
1584  const std::vector<uint8_t> mergerModel =
1585  {
1586  0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00,
1587  0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
1588  0x38, 0x02, 0x00, 0x00, 0x8C, 0x01, 0x00, 0x00, 0x70, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x02, 0x00,
1589  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
1590  0xF4, 0xFD, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B, 0x04, 0x00, 0x00, 0x00, 0x92, 0xFE, 0xFF, 0xFF, 0x04, 0x00,
1591  0x00, 0x00, 0x9A, 0xFE, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x7E, 0xFE, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00,
1592  0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00,
1593  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1594  0xF8, 0xFE, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x48, 0xFE, 0xFF, 0xFF, 0x00, 0x00,
1595  0x00, 0x1F, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00,
1596  0x68, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00,
1597  0x0C, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
1598  0x02, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x22, 0xFF, 0xFF, 0xFF, 0x04, 0x00,
1599  0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1600  0x00, 0x00, 0x00, 0x00, 0x3E, 0xFF, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00,
1601  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x36, 0xFF, 0xFF, 0xFF,
1602  0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x1E, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x1C, 0x00,
1603  0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x6D, 0x65, 0x72, 0x67, 0x65, 0x72, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
1604  0x5C, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x34, 0xFF,
1605  0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x92, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00,
1606  0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x02, 0x00,
1607  0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x08, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00,
1608  0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x00, 0x00,
1609  0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0E, 0x00,
1610  0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00,
1611  0x06, 0x00, 0x08, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0E, 0x00,
1612  0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
1613  0x0E, 0x00, 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x14, 0x00, 0x0E, 0x00, 0x00, 0x00,
1614  0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00,
1615  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
1616  0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00,
1617  0x00, 0x00, 0x66, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1618  0x04, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00,
1619  0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09,
1620  0x04, 0x00, 0x00, 0x00, 0xF6, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x0A, 0x00,
1621  0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x14, 0x00, 0x00, 0x00,
1622  0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00,
1623  0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1624  0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0A, 0x00, 0x00, 0x00,
1625  0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x08, 0x00,
1626  0x07, 0x00, 0x0C, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
1627  0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
1628  0x02, 0x00, 0x00, 0x00
1629  };
1630 
1631  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(std::string(mergerModel.begin(), mergerModel.end()));
1632  BOOST_CHECK(deserializedNetwork);
1633 
1634  const armnn::TensorInfo inputInfo = armnn::TensorInfo({ 2, 3, 2, 2 }, armnn::DataType::Float32);
1635  const armnn::TensorInfo outputInfo = armnn::TensorInfo({ 4, 3, 2, 2 }, armnn::DataType::Float32);
1636 
1637  const std::vector<armnn::TensorShape> shapes({inputInfo.GetShape(), inputInfo.GetShape()});
1638 
1639  armnn::OriginsDescriptor descriptor =
1640  armnn::CreateDescriptorForConcatenation(shapes.begin(), shapes.end(), 0);
1641 
1642  MergerLayerVerifier verifier("merger", { inputInfo, inputInfo }, { outputInfo }, descriptor);
1643  deserializedNetwork->Accept(verifier);
1644 }
1645 
1646 BOOST_AUTO_TEST_CASE(SerializeConcat)
1647 {
1648  const std::string layerName("concat");
1649  const armnn::TensorInfo inputInfo = armnn::TensorInfo({2, 3, 2, 2}, armnn::DataType::Float32);
1650  const armnn::TensorInfo outputInfo = armnn::TensorInfo({4, 3, 2, 2}, armnn::DataType::Float32);
1651 
1652  const std::vector<armnn::TensorShape> shapes({inputInfo.GetShape(), inputInfo.GetShape()});
1653 
1654  armnn::OriginsDescriptor descriptor =
1655  armnn::CreateDescriptorForConcatenation(shapes.begin(), shapes.end(), 0);
1656 
1657  armnn::INetworkPtr network = armnn::INetwork::Create();
1658  armnn::IConnectableLayer* const inputLayerOne = network->AddInputLayer(0);
1659  armnn::IConnectableLayer* const inputLayerTwo = network->AddInputLayer(1);
1660  armnn::IConnectableLayer* const concatLayer = network->AddConcatLayer(descriptor, layerName.c_str());
1661  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1662 
1663  inputLayerOne->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(0));
1664  inputLayerTwo->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(1));
1665  concatLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1666 
1667  inputLayerOne->GetOutputSlot(0).SetTensorInfo(inputInfo);
1668  inputLayerTwo->GetOutputSlot(0).SetTensorInfo(inputInfo);
1669  concatLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1670 
1671  std::string concatLayerNetwork = SerializeNetwork(*network);
1672  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(concatLayerNetwork);
1673  BOOST_CHECK(deserializedNetwork);
1674 
1675  // NOTE: using the MergerLayerVerifier to ensure that it is a concat layer and not a
1676  // merger layer that gets placed into the graph.
1677  MergerLayerVerifier verifier(layerName, {inputInfo, inputInfo}, {outputInfo}, descriptor);
1678  deserializedNetwork->Accept(verifier);
1679 }
1680 
1681 BOOST_AUTO_TEST_CASE(SerializeMinimum)
1682 {
1683  DECLARE_LAYER_VERIFIER_CLASS(Minimum)
1684 
1685  const std::string layerName("minimum");
1686  const armnn::TensorInfo info({ 1, 2, 2, 3 }, armnn::DataType::Float32);
1687 
1688  armnn::INetworkPtr network = armnn::INetwork::Create();
1689  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
1690  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
1691  armnn::IConnectableLayer* const minimumLayer = network->AddMinimumLayer(layerName.c_str());
1692  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1693 
1694  inputLayer0->GetOutputSlot(0).Connect(minimumLayer->GetInputSlot(0));
1695  inputLayer1->GetOutputSlot(0).Connect(minimumLayer->GetInputSlot(1));
1696  minimumLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1697 
1698  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
1699  inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
1700  minimumLayer->GetOutputSlot(0).SetTensorInfo(info);
1701 
1702  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1703  BOOST_CHECK(deserializedNetwork);
1704 
1705  MinimumLayerVerifier verifier(layerName, {info, info}, {info});
1706  deserializedNetwork->Accept(verifier);
1707 }
1708 
1709 BOOST_AUTO_TEST_CASE(SerializeMultiplication)
1710 {
1711  DECLARE_LAYER_VERIFIER_CLASS(Multiplication)
1712 
1713  const std::string layerName("multiplication");
1714  const armnn::TensorInfo info({ 1, 5, 2, 3 }, armnn::DataType::Float32);
1715 
1716  armnn::INetworkPtr network = armnn::INetwork::Create();
1717  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
1718  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
1719  armnn::IConnectableLayer* const multiplicationLayer = network->AddMultiplicationLayer(layerName.c_str());
1720  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1721 
1722  inputLayer0->GetOutputSlot(0).Connect(multiplicationLayer->GetInputSlot(0));
1723  inputLayer1->GetOutputSlot(0).Connect(multiplicationLayer->GetInputSlot(1));
1724  multiplicationLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1725 
1726  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
1727  inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
1728  multiplicationLayer->GetOutputSlot(0).SetTensorInfo(info);
1729 
1730  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1731  BOOST_CHECK(deserializedNetwork);
1732 
1733  MultiplicationLayerVerifier verifier(layerName, {info, info}, {info});
1734  deserializedNetwork->Accept(verifier);
1735 }
1736 
1737 BOOST_AUTO_TEST_CASE(SerializePrelu)
1738 {
1739  DECLARE_LAYER_VERIFIER_CLASS(Prelu)
1740 
1741  const std::string layerName("prelu");
1742 
1743  armnn::TensorInfo inputTensorInfo ({ 4, 1, 2 }, armnn::DataType::Float32);
1744  armnn::TensorInfo alphaTensorInfo ({ 5, 4, 3, 1 }, armnn::DataType::Float32);
1745  armnn::TensorInfo outputTensorInfo({ 5, 4, 3, 2 }, armnn::DataType::Float32);
1746 
1747  armnn::INetworkPtr network = armnn::INetwork::Create();
1748  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1749  armnn::IConnectableLayer* const alphaLayer = network->AddInputLayer(1);
1750  armnn::IConnectableLayer* const preluLayer = network->AddPreluLayer(layerName.c_str());
1751  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1752 
1753  inputLayer->GetOutputSlot(0).Connect(preluLayer->GetInputSlot(0));
1754  alphaLayer->GetOutputSlot(0).Connect(preluLayer->GetInputSlot(1));
1755  preluLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1756 
1757  inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
1758  alphaLayer->GetOutputSlot(0).SetTensorInfo(alphaTensorInfo);
1759  preluLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1760 
1761  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1762  BOOST_CHECK(deserializedNetwork);
1763 
1764  PreluLayerVerifier verifier(layerName, {inputTensorInfo, alphaTensorInfo}, {outputTensorInfo});
1765  deserializedNetwork->Accept(verifier);
1766 }
1767 
1768 BOOST_AUTO_TEST_CASE(SerializeNormalization)
1769 {
1770  DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Normalization)
1771 
1772  const std::string layerName("normalization");
1773  const armnn::TensorInfo info({2, 1, 2, 2}, armnn::DataType::Float32);
1774 
1775  armnn::NormalizationDescriptor desc;
1776  desc.m_DataLayout = armnn::DataLayout::NCHW;
1777  desc.m_NormSize = 3;
1778  desc.m_Alpha = 1;
1779  desc.m_Beta = 1;
1780  desc.m_K = 1;
1781 
1782  armnn::INetworkPtr network = armnn::INetwork::Create();
1783  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1784  armnn::IConnectableLayer* const normalizationLayer = network->AddNormalizationLayer(desc, layerName.c_str());
1785  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1786 
1787  inputLayer->GetOutputSlot(0).Connect(normalizationLayer->GetInputSlot(0));
1788  normalizationLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1789 
1790  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
1791  normalizationLayer->GetOutputSlot(0).SetTensorInfo(info);
1792 
1793  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1794  BOOST_CHECK(deserializedNetwork);
1795 
1796  NormalizationLayerVerifier verifier(layerName, {info}, {info}, desc);
1797  deserializedNetwork->Accept(verifier);
1798 }
1799 
1800 DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Pad)
1801 
1802 BOOST_AUTO_TEST_CASE(SerializePad)
1803 {
1804  const std::string layerName("pad");
1805  const armnn::TensorInfo inputTensorInfo = armnn::TensorInfo({1, 2, 3, 4}, armnn::DataType::Float32);
1806  const armnn::TensorInfo outputTensorInfo = armnn::TensorInfo({1, 3, 5, 7}, armnn::DataType::Float32);
1807 
1808  armnn::PadDescriptor desc({{0, 0}, {1, 0}, {1, 1}, {1, 2}});
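 // One (before, after) padding pair per dimension: the {1, 2, 3, 4} input is padded out to {1, 3, 5, 7}.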
1809 
1810  armnn::INetworkPtr network = armnn::INetwork::Create();
1811  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1812  armnn::IConnectableLayer* const padLayer = network->AddPadLayer(desc, layerName.c_str());
1813  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1814 
1815  inputLayer->GetOutputSlot(0).Connect(padLayer->GetInputSlot(0));
1816  padLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1817 
1818  inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
1819  padLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1820 
1821  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1822  BOOST_CHECK(deserializedNetwork);
1823 
1824  PadLayerVerifier verifier(layerName, {inputTensorInfo}, {outputTensorInfo}, desc);
1825  deserializedNetwork->Accept(verifier);
1826 }
1827 
1828 BOOST_AUTO_TEST_CASE(EnsurePadBackwardCompatibility)
1829 {
1830  // The PadDescriptor is being extended with a float PadValue (so a value other than 0
1831  // can be used to pad the tensor).
1832  //
1833  // This test contains a binary representation of a simple input->pad->output network
1834  // prior to this change to test that the descriptor has been updated in a backward
1835  // compatible way with respect to deserialization of older binary dumps.
1836  const std::vector<uint8_t> padModel =
1837  {
1838  0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00,
1839  0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
1840  0x54, 0x01, 0x00, 0x00, 0x6C, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
1841  0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0xD0, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B,
1842  0x04, 0x00, 0x00, 0x00, 0x96, 0xFF, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x9E, 0xFF, 0xFF, 0xFF, 0x04, 0x00,
1843  0x00, 0x00, 0x72, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
1844  0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00,
1845  0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x2C, 0xFF, 0xFF, 0xFF, 0x01, 0x00, 0x00, 0x00,
1846  0x00, 0x00, 0x00, 0x00, 0x24, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x16, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00,
1847  0x0E, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x4C, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00,
1848  0x00, 0x00, 0x06, 0x00, 0x08, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x08, 0x00,
1849  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1850  0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00,
1851  0x0E, 0x00, 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x14, 0x00, 0x0E, 0x00, 0x00, 0x00,
1852  0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00,
1853  0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x70, 0x61, 0x64, 0x00, 0x01, 0x00, 0x00, 0x00, 0x48, 0x00, 0x00, 0x00,
1854  0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00,
1855  0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x52, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00,
1856  0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x05, 0x00,
1857  0x00, 0x00, 0x07, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00,
1858  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00,
1859  0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x04, 0x00, 0x00, 0x00, 0xF6, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00,
1860  0x00, 0x00, 0x06, 0x00, 0x0A, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00,
1861  0x0E, 0x00, 0x14, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x0E, 0x00, 0x00, 0x00,
1862  0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00,
1863  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00,
1864  0x08, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00,
1865  0x0A, 0x00, 0x10, 0x00, 0x08, 0x00, 0x07, 0x00, 0x0C, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
1866  0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00,
1867  0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00
1868  };
1869 
1870  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(std::string(padModel.begin(), padModel.end()));
1871  BOOST_CHECK(deserializedNetwork);
1872 
1873  const armnn::TensorInfo inputInfo = armnn::TensorInfo({ 1, 2, 3, 4 }, armnn::DataType::Float32);
1874  const armnn::TensorInfo outputInfo = armnn::TensorInfo({ 1, 3, 5, 7 }, armnn::DataType::Float32);
1875 
1876  armnn::PadDescriptor descriptor({{ 0, 0 }, { 1, 0 }, { 1, 1 }, { 1, 2 }});
1877 
1878  PadLayerVerifier verifier("pad", { inputInfo }, { outputInfo }, descriptor);
1879  deserializedNetwork->Accept(verifier);
1880 }
1881 
1882 BOOST_AUTO_TEST_CASE(SerializePermute)
1883 {
1884  DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Permute)
1885 
1886  const std::string layerName("permute");
1887  const armnn::TensorInfo inputTensorInfo({4, 3, 2, 1}, armnn::DataType::Float32);
1888  const armnn::TensorInfo outputTensorInfo({1, 2, 3, 4}, armnn::DataType::Float32);
1889 
1890  armnn::PermuteDescriptor descriptor(armnn::PermutationVector({3, 2, 1, 0}));
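 // A {3, 2, 1, 0} mapping reverses the dimension order, taking the {4, 3, 2, 1} input to the {1, 2, 3, 4} output.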
1891 
1892  armnn::INetworkPtr network = armnn::INetwork::Create();
1893  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1894  armnn::IConnectableLayer* const permuteLayer = network->AddPermuteLayer(descriptor, layerName.c_str());
1895  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1896 
1897  inputLayer->GetOutputSlot(0).Connect(permuteLayer->GetInputSlot(0));
1898  permuteLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1899 
1900  inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
1901  permuteLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1902 
1903  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1904  BOOST_CHECK(deserializedNetwork);
1905 
1906  PermuteLayerVerifier verifier(layerName, {inputTensorInfo}, {outputTensorInfo}, descriptor);
1907  deserializedNetwork->Accept(verifier);
1908 }
1909 
1910 BOOST_AUTO_TEST_CASE(SerializePooling2d)
1911 {
1912  DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Pooling2d)
1913 
1914  const std::string layerName("pooling2d");
1915  const armnn::TensorInfo inputInfo({1, 2, 2, 1}, armnn::DataType::Float32);
1916  const armnn::TensorInfo outputInfo({1, 1, 1, 1}, armnn::DataType::Float32);
1917 
1918  armnn::Pooling2dDescriptor desc;
1919  desc.m_DataLayout = armnn::DataLayout::NHWC;
1920  desc.m_PadTop = 0;
1921  desc.m_PadBottom = 0;
1922  desc.m_PadLeft = 0;
1923  desc.m_PadRight = 0;
1924  desc.m_PoolType = armnn::PoolingAlgorithm::Average;
1925  desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
1926  desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
1927  desc.m_PoolHeight = 2;
1928  desc.m_PoolWidth = 2;
1929  desc.m_StrideX = 2;
1930  desc.m_StrideY = 2;
1931 
1932  armnn::INetworkPtr network = armnn::INetwork::Create();
1933  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1934  armnn::IConnectableLayer* const pooling2dLayer = network->AddPooling2dLayer(desc, layerName.c_str());
1935  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1936 
1937  inputLayer->GetOutputSlot(0).Connect(pooling2dLayer->GetInputSlot(0));
1938  pooling2dLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1939 
1940  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
1941  pooling2dLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1942 
1943  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1944  BOOST_CHECK(deserializedNetwork);
1945 
1946  Pooling2dLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, desc);
1947  deserializedNetwork->Accept(verifier);
1948 }
1949 
1950 BOOST_AUTO_TEST_CASE(SerializeQuantize)
1951 {
1952  DECLARE_LAYER_VERIFIER_CLASS(Quantize)
1953 
1954  const std::string layerName("quantize");
1955  const armnn::TensorInfo info({ 1, 2, 2, 3 }, armnn::DataType::Float32);
1956 
1957  armnn::INetworkPtr network = armnn::INetwork::Create();
1958  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1959  armnn::IConnectableLayer* const quantizeLayer = network->AddQuantizeLayer(layerName.c_str());
1960  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1961 
1962  inputLayer->GetOutputSlot(0).Connect(quantizeLayer->GetInputSlot(0));
1963  quantizeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1964 
1965  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
1966  quantizeLayer->GetOutputSlot(0).SetTensorInfo(info);
1967 
1968  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1969  BOOST_CHECK(deserializedNetwork);
1970 
1971  QuantizeLayerVerifier verifier(layerName, {info}, {info});
1972  deserializedNetwork->Accept(verifier);
1973 }
1974 
1975 BOOST_AUTO_TEST_CASE(SerializeReshape)
1976 {
1977  DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Reshape)
1978 
1979  const std::string layerName("reshape");
1980  const armnn::TensorInfo inputInfo({1, 9}, armnn::DataType::Float32);
1981  const armnn::TensorInfo outputInfo({3, 3}, armnn::DataType::Float32);
1982 
1983  armnn::ReshapeDescriptor descriptor({3, 3});
1984 
1985  armnn::INetworkPtr network = armnn::INetwork::Create();
1986  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1987  armnn::IConnectableLayer* const reshapeLayer = network->AddReshapeLayer(descriptor, layerName.c_str());
1988  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1989 
1990  inputLayer->GetOutputSlot(0).Connect(reshapeLayer->GetInputSlot(0));
1991  reshapeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1992 
1993  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
1994  reshapeLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1995 
1996  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1997  BOOST_CHECK(deserializedNetwork);
1998 
1999  ReshapeLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, descriptor);
2000  deserializedNetwork->Accept(verifier);
2001 }
2002 
2003 BOOST_AUTO_TEST_CASE(SerializeResize)
2004 {
2005  DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Resize)
2006 
2007  const std::string layerName("resize");
2008  const armnn::TensorInfo inputInfo = armnn::TensorInfo({1, 3, 5, 5}, armnn::DataType::Float32);
2009  const armnn::TensorInfo outputInfo = armnn::TensorInfo({1, 3, 2, 4}, armnn::DataType::Float32);
2010 
2011  armnn::ResizeDescriptor desc;
2012  desc.m_TargetWidth = 4;
2013  desc.m_TargetHeight = 2;
2014  desc.m_Method = armnn::ResizeMethod::NearestNeighbor;
2015 
2016  armnn::INetworkPtr network = armnn::INetwork::Create();
2017  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2018  armnn::IConnectableLayer* const resizeLayer = network->AddResizeLayer(desc, layerName.c_str());
2019  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2020 
2021  inputLayer->GetOutputSlot(0).Connect(resizeLayer->GetInputSlot(0));
2022  resizeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2023 
2024  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2025  resizeLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2026 
2027  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2028  BOOST_CHECK(deserializedNetwork);
2029 
2030  ResizeLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, desc);
2031  deserializedNetwork->Accept(verifier);
2032 }
2033 
2034 class ResizeBilinearLayerVerifier : public LayerVerifierBaseWithDescriptor<armnn::ResizeBilinearDescriptor>
2035 {
2036 public:
2037  ResizeBilinearLayerVerifier(const std::string& layerName,
2038  const std::vector<armnn::TensorInfo>& inputInfos,
2039  const std::vector<armnn::TensorInfo>& outputInfos,
2040  const armnn::ResizeBilinearDescriptor& descriptor)
2041  : LayerVerifierBaseWithDescriptor<armnn::ResizeBilinearDescriptor>(
2042  layerName, inputInfos, outputInfos, descriptor) {}
2043 
2044  void VisitResizeLayer(const armnn::IConnectableLayer* layer,
2045  const armnn::ResizeDescriptor& descriptor,
2046  const char* name) override
2047  {
2048  VerifyNameAndConnections(layer, name);
2049 
2050  BOOST_CHECK(descriptor.m_Method == armnn::ResizeMethod::Bilinear);
2051  BOOST_CHECK(descriptor.m_TargetWidth == m_Descriptor.m_TargetWidth);
2052  BOOST_CHECK(descriptor.m_TargetHeight == m_Descriptor.m_TargetHeight);
2053  BOOST_CHECK(descriptor.m_DataLayout == m_Descriptor.m_DataLayout);
2054  }
2055 
2056  void VisitResizeBilinearLayer(const armnn::IConnectableLayer*,
2057  const armnn::ResizeBilinearDescriptor&,
2058  const char*) override
2059  {
2060  throw armnn::Exception("ResizeBilinearLayer should have translated to ResizeLayer");
2061  }
2062 };
2063 
2064 // NOTE: Until the deprecated AddResizeBilinearLayer disappears, this test checks that
2065 // calling AddResizeBilinearLayer places a ResizeLayer into the serialized format
2066 // and that when this is deserialized we get a ResizeLayer.
2067 BOOST_AUTO_TEST_CASE(SerializeResizeBilinear)
2068 {
2069  const std::string layerName("resizeBilinear");
2070  const armnn::TensorInfo inputInfo = armnn::TensorInfo({1, 3, 5, 5}, armnn::DataType::Float32);
2071  const armnn::TensorInfo outputInfo = armnn::TensorInfo({1, 3, 2, 4}, armnn::DataType::Float32);
2072 
2073  armnn::ResizeBilinearDescriptor desc;
2074  desc.m_TargetWidth = 4u;
2075  desc.m_TargetHeight = 2u;
2076 
2077  armnn::INetworkPtr network = armnn::INetwork::Create();
2078  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2079  ARMNN_NO_DEPRECATE_WARN_BEGIN
2080  armnn::IConnectableLayer* const resizeLayer = network->AddResizeBilinearLayer(desc, layerName.c_str());
2081  ARMNN_NO_DEPRECATE_WARN_END
2082  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2083 
2084  inputLayer->GetOutputSlot(0).Connect(resizeLayer->GetInputSlot(0));
2085  resizeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2086 
2087  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2088  resizeLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2089 
2090  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2091  BOOST_CHECK(deserializedNetwork);
2092 
2093  ResizeBilinearLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, desc);
2094  deserializedNetwork->Accept(verifier);
2095 }
2096 
2097 BOOST_AUTO_TEST_CASE(EnsureResizeBilinearBackwardCompatibility)
2098 {
2099  // The hex data below is a flat buffer containing a simple network with an input,
2100  // a ResizeBilinearLayer (now deprecated) and an output
2101  //
2102  // This test verifies that we can still deserialize this old-style model by replacing
2103  // the ResizeBilinearLayer with an equivalent ResizeLayer
2104  const std::vector<uint8_t> resizeBilinearModel =
2105  {
2106  0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00,
2107  0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
2108  0x50, 0x01, 0x00, 0x00, 0x74, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
2109  0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0xD4, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B,
2110  0x04, 0x00, 0x00, 0x00, 0xC2, 0xFE, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x08, 0x00,
2111  0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x8A, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00,
2112  0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00,
2113  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
2114  0x38, 0xFF, 0xFF, 0xFF, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x30, 0xFF, 0xFF, 0xFF, 0x00, 0x00,
2115  0x00, 0x1A, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0E, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00,
2116  0x34, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x12, 0x00, 0x08, 0x00, 0x0C, 0x00,
2117  0x07, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
2118  0x00, 0x00, 0x0E, 0x00, 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x14, 0x00, 0x0E, 0x00,
2119  0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x19, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00,
2120  0x20, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x72, 0x65, 0x73, 0x69, 0x7A, 0x65, 0x42, 0x69, 0x6C, 0x69,
2121  0x6E, 0x65, 0x61, 0x72, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x48, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
2122  0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00,
2123  0x00, 0x00, 0x52, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
2124  0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x04, 0x00,
2125  0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
2126  0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
2127  0x00, 0x09, 0x04, 0x00, 0x00, 0x00, 0xF6, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00,
2128  0x0A, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x14, 0x00,
2129  0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,
2130  0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
2131  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0A, 0x00,
2132  0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00,
2133  0x08, 0x00, 0x07, 0x00, 0x0C, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00,
2134  0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x05, 0x00,
2135  0x00, 0x00, 0x05, 0x00, 0x00, 0x00
2136  };
2137 
2138  armnn::INetworkPtr deserializedNetwork =
2139  DeserializeNetwork(std::string(resizeBilinearModel.begin(), resizeBilinearModel.end()));
2140  BOOST_CHECK(deserializedNetwork);
2141 
2142  const armnn::TensorInfo inputInfo = armnn::TensorInfo({1, 3, 5, 5}, armnn::DataType::Float32);
2143  const armnn::TensorInfo outputInfo = armnn::TensorInfo({1, 3, 2, 4}, armnn::DataType::Float32);
2144 
2145  armnn::ResizeBilinearDescriptor descriptor;
2146  descriptor.m_TargetWidth = 4u;
2147  descriptor.m_TargetHeight = 2u;
2148 
2149  ResizeBilinearLayerVerifier verifier("resizeBilinear", { inputInfo }, { outputInfo }, descriptor);
2150  deserializedNetwork->Accept(verifier);
2151 }
2152 
2153 BOOST_AUTO_TEST_CASE(SerializeSlice)
2154 {
2155  DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Slice)
2156 
2157  const std::string layerName{"slice"};
2158 
2159  const armnn::TensorInfo inputInfo = armnn::TensorInfo({3, 2, 3, 1}, armnn::DataType::Float32);
2160  const armnn::TensorInfo outputInfo = armnn::TensorInfo({2, 2, 2, 1}, armnn::DataType::Float32);
2161 
2162  armnn::SliceDescriptor descriptor({ 0, 0, 1, 0}, {2, 2, 2, 1});
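 // Extracts a {2, 2, 2, 1} window starting at offset {0, 0, 1, 0} from the {3, 2, 3, 1} input.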
2163 
2164  armnn::INetworkPtr network = armnn::INetwork::Create();
2165 
2166  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2167  armnn::IConnectableLayer* const sliceLayer = network->AddSliceLayer(descriptor, layerName.c_str());
2168  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2169 
2170  inputLayer->GetOutputSlot(0).Connect(sliceLayer->GetInputSlot(0));
2171  sliceLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2172 
2173  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2174  sliceLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2175 
2176  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2177  BOOST_CHECK(deserializedNetwork);
2178 
2179  SliceLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, descriptor);
2180  deserializedNetwork->Accept(verifier);
2181 }
2182 
2183 BOOST_AUTO_TEST_CASE(SerializeSoftmax)
2184 {
2185  DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Softmax)
2186 
2187  const std::string layerName("softmax");
2188  const armnn::TensorInfo info({1, 10}, armnn::DataType::Float32);
2189 
2190  armnn::SoftmaxDescriptor descriptor;
2191  descriptor.m_Beta = 1.0f;
2192 
2193  armnn::INetworkPtr network = armnn::INetwork::Create();
2194  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2195  armnn::IConnectableLayer* const softmaxLayer = network->AddSoftmaxLayer(descriptor, layerName.c_str());
2196  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2197 
2198  inputLayer->GetOutputSlot(0).Connect(softmaxLayer->GetInputSlot(0));
2199  softmaxLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2200 
2201  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
2202  softmaxLayer->GetOutputSlot(0).SetTensorInfo(info);
2203 
2204  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2205  BOOST_CHECK(deserializedNetwork);
2206 
2207  SoftmaxLayerVerifier verifier(layerName, {info}, {info}, descriptor);
2208  deserializedNetwork->Accept(verifier);
2209 }
2210 
2211 BOOST_AUTO_TEST_CASE(SerializeSpaceToBatchNd)
2212 {
2213  DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(SpaceToBatchNd)
2214 
2215  const std::string layerName("spaceToBatchNd");
2216  const armnn::TensorInfo inputInfo({2, 1, 2, 4}, armnn::DataType::Float32);
2217  const armnn::TensorInfo outputInfo({8, 1, 1, 3}, armnn::DataType::Float32);
2218 
2219  armnn::SpaceToBatchNdDescriptor desc;
2220  desc.m_DataLayout = armnn::DataLayout::NCHW;
2221  desc.m_BlockShape = {2, 2};
2222  desc.m_PadList = {{0, 0}, {2, 0}};
2223 
2224  armnn::INetworkPtr network = armnn::INetwork::Create();
2225  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2226  armnn::IConnectableLayer* const spaceToBatchNdLayer = network->AddSpaceToBatchNdLayer(desc, layerName.c_str());
2227  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2228 
2229  inputLayer->GetOutputSlot(0).Connect(spaceToBatchNdLayer->GetInputSlot(0));
2230  spaceToBatchNdLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2231 
2232  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2233  spaceToBatchNdLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2234 
2235  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2236  BOOST_CHECK(deserializedNetwork);
2237 
2238  SpaceToBatchNdLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, desc);
2239  deserializedNetwork->Accept(verifier);
2240 }
2241 
2242 BOOST_AUTO_TEST_CASE(SerializeSpaceToDepth)
2243 {
2244  DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(SpaceToDepth)
2245 
2246  const std::string layerName("spaceToDepth");
2247 
2248  const armnn::TensorInfo inputInfo ({ 1, 16, 8, 3 }, armnn::DataType::Float32);
2249  const armnn::TensorInfo outputInfo({ 1, 8, 4, 12 }, armnn::DataType::Float32);
2250 
2251  armnn::SpaceToDepthDescriptor desc;
2252  desc.m_BlockSize = 2;
2253  desc.m_DataLayout = armnn::DataLayout::NHWC;
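 // A block size of 2 in NHWC moves each 2x2 spatial block into the channel dimension: {1, 16, 8, 3} -> {1, 8, 4, 12}.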
2254 
2255  armnn::INetworkPtr network = armnn::INetwork::Create();
2256  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2257  armnn::IConnectableLayer* const spaceToDepthLayer = network->AddSpaceToDepthLayer(desc, layerName.c_str());
2258  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2259 
2260  inputLayer->GetOutputSlot(0).Connect(spaceToDepthLayer->GetInputSlot(0));
2261  spaceToDepthLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2262 
2263  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2264  spaceToDepthLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2265 
2266  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2267  BOOST_CHECK(deserializedNetwork);
2268 
2269  SpaceToDepthLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, desc);
2270  deserializedNetwork->Accept(verifier);
2271 }
2272 
2273 BOOST_AUTO_TEST_CASE(SerializeSplitter)
2274 {
2275  DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Splitter)
2276 
2277  const unsigned int numViews = 3;
2278  const unsigned int numDimensions = 4;
2279  const unsigned int inputShape[] = {1, 18, 4, 4};
2280  const unsigned int outputShape[] = {1, 6, 4, 4};
2281 
2282  // This is modelled on how the Caffe parser sets up a splitter layer to partition an input along dimension one.
2283  unsigned int splitterDimSizes[4] = {static_cast<unsigned int>(inputShape[0]),
2284  static_cast<unsigned int>(inputShape[1]),
2285  static_cast<unsigned int>(inputShape[2]),
2286  static_cast<unsigned int>(inputShape[3])};
2287  splitterDimSizes[1] /= numViews;
2288  armnn::ViewsDescriptor desc(numViews, numDimensions);
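 // Each of the three views is {1, 6, 4, 4}; the loop below offsets the views along dimension 1 at 0, 6 and 12.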
2289 
2290  for (unsigned int g = 0; g < numViews; ++g)
2291  {
2292  desc.SetViewOriginCoord(g, 1, splitterDimSizes[1] * g);
2293 
2294  for (unsigned int dimIdx=0; dimIdx < 4; dimIdx++)
2295  {
2296  desc.SetViewSize(g, dimIdx, splitterDimSizes[dimIdx]);
2297  }
2298  }
2299 
2300  const std::string layerName("splitter");
2301  const armnn::TensorInfo inputInfo(numDimensions, inputShape, armnn::DataType::Float32);
2302  const armnn::TensorInfo outputInfo(numDimensions, outputShape, armnn::DataType::Float32);
2303 
2304  armnn::INetworkPtr network = armnn::INetwork::Create();
2305  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2306  armnn::IConnectableLayer* const splitterLayer = network->AddSplitterLayer(desc, layerName.c_str());
2307  armnn::IConnectableLayer* const outputLayer0 = network->AddOutputLayer(0);
2308  armnn::IConnectableLayer* const outputLayer1 = network->AddOutputLayer(1);
2309  armnn::IConnectableLayer* const outputLayer2 = network->AddOutputLayer(2);
2310 
2311  inputLayer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0));
2312  splitterLayer->GetOutputSlot(0).Connect(outputLayer0->GetInputSlot(0));
2313  splitterLayer->GetOutputSlot(1).Connect(outputLayer1->GetInputSlot(0));
2314  splitterLayer->GetOutputSlot(2).Connect(outputLayer2->GetInputSlot(0));
2315 
2316  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2317  splitterLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2318  splitterLayer->GetOutputSlot(1).SetTensorInfo(outputInfo);
2319  splitterLayer->GetOutputSlot(2).SetTensorInfo(outputInfo);
2320 
2321  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2322  BOOST_CHECK(deserializedNetwork);
2323 
2324  SplitterLayerVerifier verifier(layerName, {inputInfo}, {outputInfo, outputInfo, outputInfo}, desc);
2325  deserializedNetwork->Accept(verifier);
2326 }
2327 
2328 BOOST_AUTO_TEST_CASE(SerializeStack)
2329 {
2330  DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Stack)
2331 
2332  const std::string layerName("stack");
2333 
2334  armnn::TensorInfo inputTensorInfo ({4, 3, 5}, armnn::DataType::Float32);
2335  armnn::TensorInfo outputTensorInfo({4, 3, 2, 5}, armnn::DataType::Float32);
2336 
2337  armnn::StackDescriptor descriptor(2, 2, {4, 3, 5});
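 // Stacks the two {4, 3, 5} inputs along a new axis at position 2, producing the {4, 3, 2, 5} output.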
2338 
2339  armnn::INetworkPtr network = armnn::INetwork::Create();
2340  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(0);
2341  armnn::IConnectableLayer* const inputLayer2 = network->AddInputLayer(1);
2342  armnn::IConnectableLayer* const stackLayer = network->AddStackLayer(descriptor, layerName.c_str());
2343  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2344 
2345  inputLayer1->GetOutputSlot(0).Connect(stackLayer->GetInputSlot(0));
2346  inputLayer2->GetOutputSlot(0).Connect(stackLayer->GetInputSlot(1));
2347  stackLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2348 
2349  inputLayer1->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
2350  inputLayer2->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
2351  stackLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2352 
2353  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2354  BOOST_CHECK(deserializedNetwork);
2355 
2356  StackLayerVerifier verifier(layerName, {inputTensorInfo, inputTensorInfo}, {outputTensorInfo}, descriptor);
2357  deserializedNetwork->Accept(verifier);
2358 }
2359 
2360 BOOST_AUTO_TEST_CASE(SerializeStandIn)
2361 {
2362  DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(StandIn)
2363 
2364  const std::string layerName("standIn");
2365 
2366  armnn::TensorInfo tensorInfo({ 1u }, armnn::DataType::Float32);
2367  armnn::StandInDescriptor descriptor(2u, 2u);
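 // The stand-in layer is declared with two inputs and two outputs.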
2368 
2369  armnn::INetworkPtr network = armnn::INetwork::Create();
2370  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
2371  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
2372  armnn::IConnectableLayer* const standInLayer = network->AddStandInLayer(descriptor, layerName.c_str());
2373  armnn::IConnectableLayer* const outputLayer0 = network->AddOutputLayer(0);
2374  armnn::IConnectableLayer* const outputLayer1 = network->AddOutputLayer(1);
2375 
2376  inputLayer0->GetOutputSlot(0).Connect(standInLayer->GetInputSlot(0));
2377  inputLayer0->GetOutputSlot(0).SetTensorInfo(tensorInfo);
2378 
2379  inputLayer1->GetOutputSlot(0).Connect(standInLayer->GetInputSlot(1));
2380  inputLayer1->GetOutputSlot(0).SetTensorInfo(tensorInfo);
2381 
2382  standInLayer->GetOutputSlot(0).Connect(outputLayer0->GetInputSlot(0));
2383  standInLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
2384 
2385  standInLayer->GetOutputSlot(1).Connect(outputLayer1->GetInputSlot(0));
2386  standInLayer->GetOutputSlot(1).SetTensorInfo(tensorInfo);
2387 
2388  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2389  BOOST_CHECK(deserializedNetwork);
2390 
2391  StandInLayerVerifier verifier(layerName, { tensorInfo, tensorInfo }, { tensorInfo, tensorInfo }, descriptor);
2392  deserializedNetwork->Accept(verifier);
2393 }
2394 
2395 BOOST_AUTO_TEST_CASE(SerializeStridedSlice)
2396 {
2397  DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(StridedSlice)
2398 
2399  const std::string layerName("stridedSlice");
2400  const armnn::TensorInfo inputInfo = armnn::TensorInfo({3, 2, 3, 1}, armnn::DataType::Float32);
2401  const armnn::TensorInfo outputInfo = armnn::TensorInfo({3, 1}, armnn::DataType::Float32);
2402 
2403  armnn::StridedSliceDescriptor desc({0, 0, 1, 0}, {1, 1, 1, 1}, {1, 1, 1, 1});
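 // The end mask set below covers all four dimensions (so the end values are ignored) and the shrink-axis mask
 // removes dimensions 1 and 2, reducing the {3, 2, 3, 1} input to the {3, 1} output.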
2404  desc.m_EndMask = (1 << 4) - 1;
2405  desc.m_ShrinkAxisMask = (1 << 1) | (1 << 2);
2406  desc.m_DataLayout = armnn::DataLayout::NCHW;
2407 
2408  armnn::INetworkPtr network = armnn::INetwork::Create();
2409  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2410  armnn::IConnectableLayer* const stridedSliceLayer = network->AddStridedSliceLayer(desc, layerName.c_str());
2411  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2412 
2413  inputLayer->GetOutputSlot(0).Connect(stridedSliceLayer->GetInputSlot(0));
2414  stridedSliceLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2415 
2416  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2417  stridedSliceLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2418 
2419  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2420  BOOST_CHECK(deserializedNetwork);
2421 
2422  StridedSliceLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, desc);
2423  deserializedNetwork->Accept(verifier);
2424 }
2425 
2426 BOOST_AUTO_TEST_CASE(SerializeSubtraction)
2427 {
2428  DECLARE_LAYER_VERIFIER_CLASS(Subtraction)
2429 
2430  const std::string layerName("subtraction");
2431  const armnn::TensorInfo info({ 1, 4 }, armnn::DataType::Float32);
2432 
2433  armnn::INetworkPtr network = armnn::INetwork::Create();
2434  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
2435  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
2436  armnn::IConnectableLayer* const subtractionLayer = network->AddSubtractionLayer(layerName.c_str());
2437  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2438 
2439  inputLayer0->GetOutputSlot(0).Connect(subtractionLayer->GetInputSlot(0));
2440  inputLayer1->GetOutputSlot(0).Connect(subtractionLayer->GetInputSlot(1));
2441  subtractionLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2442 
2443  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
2444  inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
2445  subtractionLayer->GetOutputSlot(0).SetTensorInfo(info);
2446 
2447  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2448  BOOST_CHECK(deserializedNetwork);
2449 
2450  SubtractionLayerVerifier verifier(layerName, {info, info}, {info});
2451  deserializedNetwork->Accept(verifier);
2452 }
2453 
2454 BOOST_AUTO_TEST_CASE(SerializeSwitch)
2455 {
2456  class SwitchLayerVerifier : public LayerVerifierBase
2457  {
2458  public:
2459  SwitchLayerVerifier(const std::string& layerName,
2460  const std::vector<armnn::TensorInfo>& inputInfos,
2461  const std::vector<armnn::TensorInfo>& outputInfos)
2462  : LayerVerifierBase(layerName, inputInfos, outputInfos) {}
2463 
2464  void VisitSwitchLayer(const armnn::IConnectableLayer* layer, const char* name) override
2465  {
2466  VerifyNameAndConnections(layer, name);
2467  }
2468 
2469  void VisitConstantLayer(const armnn::IConnectableLayer*,
2470  const armnn::ConstTensor&,
2471  const char*) override {}
2472  };
2473 
2474  const std::string layerName("switch");
2475  const armnn::TensorInfo info({ 1, 4 }, armnn::DataType::Float32);
2476 
2477  std::vector<float> constantData = GenerateRandomData<float>(info.GetNumElements());
2478  armnn::ConstTensor constTensor(info, constantData);
2479 
2480  armnn::INetworkPtr network = armnn::INetwork::Create();
2481  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2482  armnn::IConnectableLayer* const constantLayer = network->AddConstantLayer(constTensor, "constant");
2483  armnn::IConnectableLayer* const switchLayer = network->AddSwitchLayer(layerName.c_str());
2484  armnn::IConnectableLayer* const trueOutputLayer = network->AddOutputLayer(0);
2485  armnn::IConnectableLayer* const falseOutputLayer = network->AddOutputLayer(1);
2486 
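    // the switch layer consumes the data tensor and a constant condition, and exposes two outputs
    // (true branch on output slot 0, false branch on output slot 1)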
2487  inputLayer->GetOutputSlot(0).Connect(switchLayer->GetInputSlot(0));
2488  constantLayer->GetOutputSlot(0).Connect(switchLayer->GetInputSlot(1));
2489  switchLayer->GetOutputSlot(0).Connect(trueOutputLayer->GetInputSlot(0));
2490  switchLayer->GetOutputSlot(1).Connect(falseOutputLayer->GetInputSlot(0));
2491 
2492  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
2493  constantLayer->GetOutputSlot(0).SetTensorInfo(info);
2494  switchLayer->GetOutputSlot(0).SetTensorInfo(info);
2495  switchLayer->GetOutputSlot(1).SetTensorInfo(info);
2496 
2497  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2498  BOOST_CHECK(deserializedNetwork);
2499 
2500  SwitchLayerVerifier verifier(layerName, {info, info}, {info, info});
2501  deserializedNetwork->Accept(verifier);
2502 }
2503 
2504 BOOST_AUTO_TEST_CASE(SerializeTransposeConvolution2d)
2505 {
2506  using Descriptor = armnn::TransposeConvolution2dDescriptor;
2507  class TransposeConvolution2dLayerVerifier : public LayerVerifierBaseWithDescriptor<Descriptor>
2508  {
2509  public:
2510  TransposeConvolution2dLayerVerifier(const std::string& layerName,
2511  const std::vector<armnn::TensorInfo>& inputInfos,
2512  const std::vector<armnn::TensorInfo>& outputInfos,
2513  const Descriptor& descriptor,
2514  const armnn::ConstTensor& weights,
2515  const armnn::Optional<armnn::ConstTensor>& biases)
2516  : LayerVerifierBaseWithDescriptor<Descriptor>(layerName, inputInfos, outputInfos, descriptor)
2517  , m_Weights(weights)
2518  , m_Biases(biases)
2519  {}
2520 
2521  void VisitTransposeConvolution2dLayer(const armnn::IConnectableLayer* layer,
2522  const Descriptor& descriptor,
2523  const armnn::ConstTensor& weights,
2524  const armnn::Optional<armnn::ConstTensor>& biases,
2525  const char* name) override
2526  {
2527  VerifyNameAndConnections(layer, name);
2528  VerifyDescriptor(descriptor);
2529 
2530  // check weights
2531  CompareConstTensor(weights, m_Weights);
2532 
2533  // check biases
2534  BOOST_CHECK(biases.has_value() == descriptor.m_BiasEnabled);
2535  BOOST_CHECK(biases.has_value() == m_Biases.has_value());
2536 
2537  if (biases.has_value() && m_Biases.has_value())
2538  {
2539  CompareConstTensor(biases.value(), m_Biases.value());
2540  }
2541  }
2542 
2543  private:
2544  armnn::ConstTensor m_Weights;
2545  armnn::Optional<armnn::ConstTensor> m_Biases;
2546  };
2547 
2548  const std::string layerName("transposeConvolution2d");
2549  const armnn::TensorInfo inputInfo ({ 1, 7, 7, 1 }, armnn::DataType::Float32);
2550  const armnn::TensorInfo outputInfo({ 1, 9, 9, 1 }, armnn::DataType::Float32);
2551 
2552  const armnn::TensorInfo weightsInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32);
2553  const armnn::TensorInfo biasesInfo ({ 1 }, armnn::DataType::Float32);
2554 
2555  std::vector<float> weightsData = GenerateRandomData<float>(weightsInfo.GetNumElements());
2556  armnn::ConstTensor weights(weightsInfo, weightsData);
2557 
2558  std::vector<float> biasesData = GenerateRandomData<float>(biasesInfo.GetNumElements());
2559  armnn::ConstTensor biases(biasesInfo, biasesData);
2560 
2561  Descriptor descriptor;
2562  descriptor.m_PadLeft = 1;
2563  descriptor.m_PadRight = 1;
2564  descriptor.m_PadTop = 1;
2565  descriptor.m_PadBottom = 1;
2566  descriptor.m_StrideX = 1;
2567  descriptor.m_StrideY = 1;
2568  descriptor.m_BiasEnabled = true;
2569  descriptor.m_DataLayout = armnn::DataLayout::NHWC;
2570 
2571  armnn::INetworkPtr network = armnn::INetwork::Create();
2572  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2573  armnn::IConnectableLayer* const convLayer =
2574  network->AddTransposeConvolution2dLayer(descriptor,
2575  weights,
2576  armnn::Optional<armnn::ConstTensor>(biases),
2577  layerName.c_str());
2578  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2579 
2580  inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
2581  convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2582 
2583  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2584  convLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2585 
2586  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2587  BOOST_CHECK(deserializedNetwork);
2588 
2589  TransposeConvolution2dLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, descriptor, weights, biases);
2590  deserializedNetwork->Accept(verifier);
2591 }
2592 
2593 BOOST_AUTO_TEST_CASE(SerializeDeserializeNonLinearNetwork)
2594 {
2595  class ConstantLayerVerifier : public LayerVerifierBase
2596  {
2597  public:
2598  ConstantLayerVerifier(const std::string& layerName,
2599  const std::vector<armnn::TensorInfo>& inputInfos,
2600  const std::vector<armnn::TensorInfo>& outputInfos,
2601  const armnn::ConstTensor& layerInput)
2602  : LayerVerifierBase(layerName, inputInfos, outputInfos)
2603  , m_LayerInput(layerInput) {}
2604 
2605  void VisitConstantLayer(const armnn::IConnectableLayer* layer,
2606  const armnn::ConstTensor& input,
2607  const char* name) override
2608  {
2609  VerifyNameAndConnections(layer, name);
2610  CompareConstTensor(input, m_LayerInput);
2611  }
2612 
2613  void VisitAdditionLayer(const armnn::IConnectableLayer*, const char*) override {}
2614 
2615  private:
2616  armnn::ConstTensor m_LayerInput;
2617  };
2618 
2619  const std::string layerName("constant");
2620  const armnn::TensorInfo info({ 2, 3 }, armnn::DataType::Float32);
2621 
2622  std::vector<float> constantData = GenerateRandomData<float>(info.GetNumElements());
2623  armnn::ConstTensor constTensor(info, constantData);
2624 
2625  armnn::INetworkPtr network = armnn::INetwork::Create();
2626  armnn::IConnectableLayer* input = network->AddInputLayer(0);
2627  armnn::IConnectableLayer* add = network->AddAdditionLayer();
2628  armnn::IConnectableLayer* constant = network->AddConstantLayer(constTensor, layerName.c_str());
2629  armnn::IConnectableLayer* output = network->AddOutputLayer(0);
2630 
2631  input->GetOutputSlot(0).Connect(add->GetInputSlot(0));
2632  constant->GetOutputSlot(0).Connect(add->GetInputSlot(1));
2633  add->GetOutputSlot(0).Connect(output->GetInputSlot(0));
2634 
2635  input->GetOutputSlot(0).SetTensorInfo(info);
2636  constant->GetOutputSlot(0).SetTensorInfo(info);
2637  add->GetOutputSlot(0).SetTensorInfo(info);
2638 
2639  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2640  BOOST_CHECK(deserializedNetwork);
2641 
2642  ConstantLayerVerifier verifier(layerName, {}, {info}, constTensor);
2643  deserializedNetwork->Accept(verifier);
2644 }
2645 
2646 class VerifyLstmLayer : public LayerVerifierBaseWithDescriptor<armnn::LstmDescriptor>
2647 {
2648 public:
2649  VerifyLstmLayer(const std::string& layerName,
2650  const std::vector<armnn::TensorInfo>& inputInfos,
2651  const std::vector<armnn::TensorInfo>& outputInfos,
2652  const armnn::LstmDescriptor& descriptor,
2653  const armnn::LstmInputParams& inputParams)
2654  : LayerVerifierBaseWithDescriptor<armnn::LstmDescriptor>(layerName, inputInfos, outputInfos, descriptor)
2655  , m_InputParams(inputParams) {}
2656 
2657  void VisitLstmLayer(const armnn::IConnectableLayer* layer,
2658  const armnn::LstmDescriptor& descriptor,
2659  const armnn::LstmInputParams& params,
2660  const char* name)
2661  {
2662  VerifyNameAndConnections(layer, name);
2663  VerifyDescriptor(descriptor);
2664  VerifyInputParameters(params);
2665  }
2666 
2667 protected:
2668  void VerifyInputParameters(const armnn::LstmInputParams& params)
2669  {
2670  VerifyConstTensors(
2671  "m_InputToInputWeights", m_InputParams.m_InputToInputWeights, params.m_InputToInputWeights);
2672  VerifyConstTensors(
2673  "m_InputToForgetWeights", m_InputParams.m_InputToForgetWeights, params.m_InputToForgetWeights);
2674  VerifyConstTensors(
2675  "m_InputToCellWeights", m_InputParams.m_InputToCellWeights, params.m_InputToCellWeights);
2676  VerifyConstTensors(
2677  "m_InputToOutputWeights", m_InputParams.m_InputToOutputWeights, params.m_InputToOutputWeights);
2678  VerifyConstTensors(
2679  "m_RecurrentToInputWeights", m_InputParams.m_RecurrentToInputWeights, params.m_RecurrentToInputWeights);
2680  VerifyConstTensors(
2681  "m_RecurrentToForgetWeights", m_InputParams.m_RecurrentToForgetWeights, params.m_RecurrentToForgetWeights);
2682  VerifyConstTensors(
2683  "m_RecurrentToCellWeights", m_InputParams.m_RecurrentToCellWeights, params.m_RecurrentToCellWeights);
2684  VerifyConstTensors(
2685  "m_RecurrentToOutputWeights", m_InputParams.m_RecurrentToOutputWeights, params.m_RecurrentToOutputWeights);
2686  VerifyConstTensors(
2687  "m_CellToInputWeights", m_InputParams.m_CellToInputWeights, params.m_CellToInputWeights);
2688  VerifyConstTensors(
2689  "m_CellToForgetWeights", m_InputParams.m_CellToForgetWeights, params.m_CellToForgetWeights);
2690  VerifyConstTensors(
2691  "m_CellToOutputWeights", m_InputParams.m_CellToOutputWeights, params.m_CellToOutputWeights);
2692  VerifyConstTensors(
2693  "m_InputGateBias", m_InputParams.m_InputGateBias, params.m_InputGateBias);
2694  VerifyConstTensors(
2695  "m_ForgetGateBias", m_InputParams.m_ForgetGateBias, params.m_ForgetGateBias);
2696  VerifyConstTensors(
2697  "m_CellBias", m_InputParams.m_CellBias, params.m_CellBias);
2698  VerifyConstTensors(
2699  "m_OutputGateBias", m_InputParams.m_OutputGateBias, params.m_OutputGateBias);
2700  VerifyConstTensors(
2701  "m_ProjectionWeights", m_InputParams.m_ProjectionWeights, params.m_ProjectionWeights);
2702  VerifyConstTensors(
2703  "m_ProjectionBias", m_InputParams.m_ProjectionBias, params.m_ProjectionBias);
2704  VerifyConstTensors(
2705  "m_InputLayerNormWeights", m_InputParams.m_InputLayerNormWeights, params.m_InputLayerNormWeights);
2706  VerifyConstTensors(
2707  "m_ForgetLayerNormWeights", m_InputParams.m_ForgetLayerNormWeights, params.m_ForgetLayerNormWeights);
2708  VerifyConstTensors(
2709  "m_CellLayerNormWeights", m_InputParams.m_CellLayerNormWeights, params.m_CellLayerNormWeights);
2710  VerifyConstTensors(
2711  "m_OutputLayerNormWeights", m_InputParams.m_OutputLayerNormWeights, params.m_OutputLayerNormWeights);
2712  }
2713 
2714 private:
2715  armnn::LstmInputParams m_InputParams;
2716 };
2717 
2718 BOOST_AUTO_TEST_CASE(SerializeDeserializeLstmCifgPeepholeNoProjection)
2719 {
2720  armnn::LstmDescriptor descriptor;
2721  descriptor.m_ActivationFunc = 4;
2722  descriptor.m_ClippingThresProj = 0.0f;
2723  descriptor.m_ClippingThresCell = 0.0f;
2724  descriptor.m_CifgEnabled = true; // if this is true then we DON'T need to set the OptCifgParams
2725  descriptor.m_ProjectionEnabled = false;
2726  descriptor.m_PeepholeEnabled = true;
2727 
2728  const uint32_t batchSize = 1;
2729  const uint32_t inputSize = 2;
2730  const uint32_t numUnits = 4;
2731  const uint32_t outputSize = numUnits;
2732 
2733  armnn::TensorInfo inputWeightsInfo1({numUnits, inputSize}, armnn::DataType::Float32);
2734  std::vector<float> inputToForgetWeightsData = GenerateRandomData<float>(inputWeightsInfo1.GetNumElements());
2735  armnn::ConstTensor inputToForgetWeights(inputWeightsInfo1, inputToForgetWeightsData);
2736 
2737  std::vector<float> inputToCellWeightsData = GenerateRandomData<float>(inputWeightsInfo1.GetNumElements());
2738  armnn::ConstTensor inputToCellWeights(inputWeightsInfo1, inputToCellWeightsData);
2739 
2740  std::vector<float> inputToOutputWeightsData = GenerateRandomData<float>(inputWeightsInfo1.GetNumElements());
2741  armnn::ConstTensor inputToOutputWeights(inputWeightsInfo1, inputToOutputWeightsData);
2742 
2743  armnn::TensorInfo inputWeightsInfo2({numUnits, outputSize}, armnn::DataType::Float32);
2744  std::vector<float> recurrentToForgetWeightsData = GenerateRandomData<float>(inputWeightsInfo2.GetNumElements());
2745  armnn::ConstTensor recurrentToForgetWeights(inputWeightsInfo2, recurrentToForgetWeightsData);
2746 
2747  std::vector<float> recurrentToCellWeightsData = GenerateRandomData<float>(inputWeightsInfo2.GetNumElements());
2748  armnn::ConstTensor recurrentToCellWeights(inputWeightsInfo2, recurrentToCellWeightsData);
2749 
2750  std::vector<float> recurrentToOutputWeightsData = GenerateRandomData<float>(inputWeightsInfo2.GetNumElements());
2751  armnn::ConstTensor recurrentToOutputWeights(inputWeightsInfo2, recurrentToOutputWeightsData);
2752 
2753  armnn::TensorInfo inputWeightsInfo3({numUnits}, armnn::DataType::Float32);
2754  std::vector<float> cellToForgetWeightsData = GenerateRandomData<float>(inputWeightsInfo3.GetNumElements());
2755  armnn::ConstTensor cellToForgetWeights(inputWeightsInfo3, cellToForgetWeightsData);
2756 
2757  std::vector<float> cellToOutputWeightsData = GenerateRandomData<float>(inputWeightsInfo3.GetNumElements());
2758  armnn::ConstTensor cellToOutputWeights(inputWeightsInfo3, cellToOutputWeightsData);
2759 
2760  std::vector<float> forgetGateBiasData(numUnits, 1.0f);
2761  armnn::ConstTensor forgetGateBias(inputWeightsInfo3, forgetGateBiasData);
2762 
2763  std::vector<float> cellBiasData(numUnits, 0.0f);
2764  armnn::ConstTensor cellBias(inputWeightsInfo3, cellBiasData);
2765 
2766  std::vector<float> outputGateBiasData(numUnits, 0.0f);
2767  armnn::ConstTensor outputGateBias(inputWeightsInfo3, outputGateBiasData);
2768 
2769  armnn::LstmInputParams params;
2770  params.m_InputToForgetWeights = &inputToForgetWeights;
2771  params.m_InputToCellWeights = &inputToCellWeights;
2772  params.m_InputToOutputWeights = &inputToOutputWeights;
2773  params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
2774  params.m_RecurrentToCellWeights = &recurrentToCellWeights;
2775  params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
2776  params.m_ForgetGateBias = &forgetGateBias;
2777  params.m_CellBias = &cellBias;
2778  params.m_OutputGateBias = &outputGateBias;
2779  params.m_CellToForgetWeights = &cellToForgetWeights;
2780  params.m_CellToOutputWeights = &cellToOutputWeights;
2781 
2782  armnn::INetworkPtr network = armnn::INetwork::Create();
2783  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2784  armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(1);
2785  armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(2);
2786  const std::string layerName("lstm");
2787  armnn::IConnectableLayer* const lstmLayer = network->AddLstmLayer(descriptor, params, layerName.c_str());
2788  armnn::IConnectableLayer* const scratchBuffer = network->AddOutputLayer(0);
2789  armnn::IConnectableLayer* const outputStateOut = network->AddOutputLayer(1);
2790  armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(2);
2791  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(3);
2792 
2793  // connect up
2794  armnn::TensorInfo inputTensorInfo({ batchSize, inputSize }, armnn::DataType::Float32);
2795  armnn::TensorInfo cellStateTensorInfo({ batchSize, numUnits}, armnn::DataType::Float32);
2796  armnn::TensorInfo outputStateTensorInfo({ batchSize, outputSize }, armnn::DataType::Float32);
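    // with CIFG enabled the scratch buffer only needs room for 3 gate buffers (numUnits * 4 when CIFG is disabled)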
2797  armnn::TensorInfo lstmTensorInfoScratchBuff({ batchSize, numUnits * 3 }, armnn::DataType::Float32);
2798 
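    // LSTM input slots: 0 = input, 1 = output state in, 2 = cell state in;
    // output slots: 0 = scratch buffer, 1 = output state out, 2 = cell state out, 3 = output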
2799  inputLayer->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(0));
2800  inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
2801 
2802  outputStateIn->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(1));
2803  outputStateIn->GetOutputSlot(0).SetTensorInfo(outputStateTensorInfo);
2804 
2805  cellStateIn->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(2));
2806  cellStateIn->GetOutputSlot(0).SetTensorInfo(cellStateTensorInfo);
2807 
2808  lstmLayer->GetOutputSlot(0).Connect(scratchBuffer->GetInputSlot(0));
2809  lstmLayer->GetOutputSlot(0).SetTensorInfo(lstmTensorInfoScratchBuff);
2810 
2811  lstmLayer->GetOutputSlot(1).Connect(outputStateOut->GetInputSlot(0));
2812  lstmLayer->GetOutputSlot(1).SetTensorInfo(outputStateTensorInfo);
2813 
2814  lstmLayer->GetOutputSlot(2).Connect(cellStateOut->GetInputSlot(0));
2815  lstmLayer->GetOutputSlot(2).SetTensorInfo(cellStateTensorInfo);
2816 
2817  lstmLayer->GetOutputSlot(3).Connect(outputLayer->GetInputSlot(0));
2818  lstmLayer->GetOutputSlot(3).SetTensorInfo(outputStateTensorInfo);
2819 
2820  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2821  BOOST_CHECK(deserializedNetwork);
2822 
2823  VerifyLstmLayer checker(
2824  layerName,
2825  {inputTensorInfo, outputStateTensorInfo, cellStateTensorInfo},
2826  {lstmTensorInfoScratchBuff, outputStateTensorInfo, cellStateTensorInfo, outputStateTensorInfo},
2827  descriptor,
2828  params);
2829  deserializedNetwork->Accept(checker);
2830 }
2831 
2832 BOOST_AUTO_TEST_CASE(SerializeDeserializeLstmNoCifgWithPeepholeAndProjection)
2833 {
2834  armnn::LstmDescriptor descriptor;
2835  descriptor.m_ActivationFunc = 4;
2836  descriptor.m_ClippingThresProj = 0.0f;
2837  descriptor.m_ClippingThresCell = 0.0f;
2838  descriptor.m_CifgEnabled = false; // if this is false then we DO need to set the OptCifgParams
2839  descriptor.m_ProjectionEnabled = true;
2840  descriptor.m_PeepholeEnabled = true;
2841 
2842  const uint32_t batchSize = 2;
2843  const uint32_t inputSize = 5;
2844  const uint32_t numUnits = 20;
2845  const uint32_t outputSize = 16;
2846 
2847  armnn::TensorInfo tensorInfo20x5({numUnits, inputSize}, armnn::DataType::Float32);
2848  std::vector<float> inputToInputWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
2849  armnn::ConstTensor inputToInputWeights(tensorInfo20x5, inputToInputWeightsData);
2850 
2851  std::vector<float> inputToForgetWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
2852  armnn::ConstTensor inputToForgetWeights(tensorInfo20x5, inputToForgetWeightsData);
2853 
2854  std::vector<float> inputToCellWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
2855  armnn::ConstTensor inputToCellWeights(tensorInfo20x5, inputToCellWeightsData);
2856 
2857  std::vector<float> inputToOutputWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
2858  armnn::ConstTensor inputToOutputWeights(tensorInfo20x5, inputToOutputWeightsData);
2859 
2860  armnn::TensorInfo tensorInfo20({numUnits}, armnn::DataType::Float32);
2861  std::vector<float> inputGateBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
2862  armnn::ConstTensor inputGateBias(tensorInfo20, inputGateBiasData);
2863 
2864  std::vector<float> forgetGateBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
2865  armnn::ConstTensor forgetGateBias(tensorInfo20, forgetGateBiasData);
2866 
2867  std::vector<float> cellBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
2868  armnn::ConstTensor cellBias(tensorInfo20, cellBiasData);
2869 
2870  std::vector<float> outputGateBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
2871  armnn::ConstTensor outputGateBias(tensorInfo20, outputGateBiasData);
2872 
2873  armnn::TensorInfo tensorInfo20x16({numUnits, outputSize}, armnn::DataType::Float32);
2874  std::vector<float> recurrentToInputWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
2875  armnn::ConstTensor recurrentToInputWeights(tensorInfo20x16, recurrentToInputWeightsData);
2876 
2877  std::vector<float> recurrentToForgetWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
2878  armnn::ConstTensor recurrentToForgetWeights(tensorInfo20x16, recurrentToForgetWeightsData);
2879 
2880  std::vector<float> recurrentToCellWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
2881  armnn::ConstTensor recurrentToCellWeights(tensorInfo20x16, recurrentToCellWeightsData);
2882 
2883  std::vector<float> recurrentToOutputWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
2884  armnn::ConstTensor recurrentToOutputWeights(tensorInfo20x16, recurrentToOutputWeightsData);
2885 
2886  std::vector<float> cellToInputWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
2887  armnn::ConstTensor cellToInputWeights(tensorInfo20, cellToInputWeightsData);
2888 
2889  std::vector<float> cellToForgetWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
2890  armnn::ConstTensor cellToForgetWeights(tensorInfo20, cellToForgetWeightsData);
2891 
2892  std::vector<float> cellToOutputWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
2893  armnn::ConstTensor cellToOutputWeights(tensorInfo20, cellToOutputWeightsData);
2894 
2895  armnn::TensorInfo tensorInfo16x20({outputSize, numUnits}, armnn::DataType::Float32);
2896  std::vector<float> projectionWeightsData = GenerateRandomData<float>(tensorInfo16x20.GetNumElements());
2897  armnn::ConstTensor projectionWeights(tensorInfo16x20, projectionWeightsData);
2898 
2899  armnn::TensorInfo tensorInfo16({outputSize}, armnn::DataType::Float32);
2900  std::vector<float> projectionBiasData(outputSize, 0.f);
2901  armnn::ConstTensor projectionBias(tensorInfo16, projectionBiasData);
2902 
2903  armnn::LstmInputParams params;
2904  params.m_InputToForgetWeights = &inputToForgetWeights;
2905  params.m_InputToCellWeights = &inputToCellWeights;
2906  params.m_InputToOutputWeights = &inputToOutputWeights;
2907  params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
2908  params.m_RecurrentToCellWeights = &recurrentToCellWeights;
2909  params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
2910  params.m_ForgetGateBias = &forgetGateBias;
2911  params.m_CellBias = &cellBias;
2912  params.m_OutputGateBias = &outputGateBias;
2913 
2914  // additional params because: descriptor.m_CifgEnabled = false
2915  params.m_InputToInputWeights = &inputToInputWeights;
2916  params.m_RecurrentToInputWeights = &recurrentToInputWeights;
2917  params.m_CellToInputWeights = &cellToInputWeights;
2918  params.m_InputGateBias = &inputGateBias;
2919 
2920  // additional params because: descriptor.m_ProjectionEnabled = true
2921  params.m_ProjectionWeights = &projectionWeights;
2922  params.m_ProjectionBias = &projectionBias;
2923 
2924  // additional params because: descriptor.m_PeepholeEnabled = true
2925  params.m_CellToForgetWeights = &cellToForgetWeights;
2926  params.m_CellToOutputWeights = &cellToOutputWeights;
2927 
2928  armnn::INetworkPtr network = armnn::INetwork::Create();
2929  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2930  armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(1);
2931  armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(2);
2932  const std::string layerName("lstm");
2933  armnn::IConnectableLayer* const lstmLayer = network->AddLstmLayer(descriptor, params, layerName.c_str());
2934  armnn::IConnectableLayer* const scratchBuffer = network->AddOutputLayer(0);
2935  armnn::IConnectableLayer* const outputStateOut = network->AddOutputLayer(1);
2936  armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(2);
2937  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(3);
2938 
2939  // connect up
2940  armnn::TensorInfo inputTensorInfo({ batchSize, inputSize }, armnn::DataType::Float32);
2941  armnn::TensorInfo cellStateTensorInfo({ batchSize, numUnits}, armnn::DataType::Float32);
2942  armnn::TensorInfo outputStateTensorInfo({ batchSize, outputSize }, armnn::DataType::Float32);
2943  armnn::TensorInfo lstmTensorInfoScratchBuff({ batchSize, numUnits * 4 }, armnn::DataType::Float32);
2944 
2945  inputLayer->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(0));
2946  inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
2947 
2948  outputStateIn->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(1));
2949  outputStateIn->GetOutputSlot(0).SetTensorInfo(outputStateTensorInfo);
2950 
2951  cellStateIn->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(2));
2952  cellStateIn->GetOutputSlot(0).SetTensorInfo(cellStateTensorInfo);
2953 
2954  lstmLayer->GetOutputSlot(0).Connect(scratchBuffer->GetInputSlot(0));
2955  lstmLayer->GetOutputSlot(0).SetTensorInfo(lstmTensorInfoScratchBuff);
2956 
2957  lstmLayer->GetOutputSlot(1).Connect(outputStateOut->GetInputSlot(0));
2958  lstmLayer->GetOutputSlot(1).SetTensorInfo(outputStateTensorInfo);
2959 
2960  lstmLayer->GetOutputSlot(2).Connect(cellStateOut->GetInputSlot(0));
2961  lstmLayer->GetOutputSlot(2).SetTensorInfo(cellStateTensorInfo);
2962 
2963  lstmLayer->GetOutputSlot(3).Connect(outputLayer->GetInputSlot(0));
2964  lstmLayer->GetOutputSlot(3).SetTensorInfo(outputStateTensorInfo);
2965 
2966  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2967  BOOST_CHECK(deserializedNetwork);
2968 
2969  VerifyLstmLayer checker(
2970  layerName,
2971  {inputTensorInfo, outputStateTensorInfo, cellStateTensorInfo},
2972  {lstmTensorInfoScratchBuff, outputStateTensorInfo, cellStateTensorInfo, outputStateTensorInfo},
2973  descriptor,
2974  params);
2975  deserializedNetwork->Accept(checker);
2976 }
2977 
2978 BOOST_AUTO_TEST_CASE(SerializeDeserializeLstmNoCifgWithPeepholeWithProjectionWithLayerNorm)
2979 {
2980  armnn::LstmDescriptor descriptor;
2981  descriptor.m_ActivationFunc = 4;
2982  descriptor.m_ClippingThresProj = 0.0f;
2983  descriptor.m_ClippingThresCell = 0.0f;
2984  descriptor.m_CifgEnabled = false; // if this is false then we DO need to set the OptCifgParams
2985  descriptor.m_ProjectionEnabled = true;
2986  descriptor.m_PeepholeEnabled = true;
2987  descriptor.m_LayerNormEnabled = true;
2988 
2989  const uint32_t batchSize = 2;
2990  const uint32_t inputSize = 5;
2991  const uint32_t numUnits = 20;
2992  const uint32_t outputSize = 16;
2993 
2994  armnn::TensorInfo tensorInfo20x5({numUnits, inputSize}, armnn::DataType::Float32);
2995  std::vector<float> inputToInputWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
2996  armnn::ConstTensor inputToInputWeights(tensorInfo20x5, inputToInputWeightsData);
2997 
2998  std::vector<float> inputToForgetWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
2999  armnn::ConstTensor inputToForgetWeights(tensorInfo20x5, inputToForgetWeightsData);
3000 
3001  std::vector<float> inputToCellWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
3002  armnn::ConstTensor inputToCellWeights(tensorInfo20x5, inputToCellWeightsData);
3003 
3004  std::vector<float> inputToOutputWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
3005  armnn::ConstTensor inputToOutputWeights(tensorInfo20x5, inputToOutputWeightsData);
3006 
3007  armnn::TensorInfo tensorInfo20({numUnits}, armnn::DataType::Float32);
3008  std::vector<float> inputGateBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
3009  armnn::ConstTensor inputGateBias(tensorInfo20, inputGateBiasData);
3010 
3011  std::vector<float> forgetGateBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
3012  armnn::ConstTensor forgetGateBias(tensorInfo20, forgetGateBiasData);
3013 
3014  std::vector<float> cellBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
3015  armnn::ConstTensor cellBias(tensorInfo20, cellBiasData);
3016 
3017  std::vector<float> outputGateBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
3018  armnn::ConstTensor outputGateBias(tensorInfo20, outputGateBiasData);
3019 
3020  armnn::TensorInfo tensorInfo20x16({numUnits, outputSize}, armnn::DataType::Float32);
3021  std::vector<float> recurrentToInputWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
3022  armnn::ConstTensor recurrentToInputWeights(tensorInfo20x16, recurrentToInputWeightsData);
3023 
3024  std::vector<float> recurrentToForgetWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
3025  armnn::ConstTensor recurrentToForgetWeights(tensorInfo20x16, recurrentToForgetWeightsData);
3026 
3027  std::vector<float> recurrentToCellWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
3028  armnn::ConstTensor recurrentToCellWeights(tensorInfo20x16, recurrentToCellWeightsData);
3029 
3030  std::vector<float> recurrentToOutputWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
3031  armnn::ConstTensor recurrentToOutputWeights(tensorInfo20x16, recurrentToOutputWeightsData);
3032 
3033  std::vector<float> cellToInputWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
3034  armnn::ConstTensor cellToInputWeights(tensorInfo20, cellToInputWeightsData);
3035 
3036  std::vector<float> cellToForgetWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
3037  armnn::ConstTensor cellToForgetWeights(tensorInfo20, cellToForgetWeightsData);
3038 
3039  std::vector<float> cellToOutputWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
3040  armnn::ConstTensor cellToOutputWeights(tensorInfo20, cellToOutputWeightsData);
3041 
3042  armnn::TensorInfo tensorInfo16x20({outputSize, numUnits}, armnn::DataType::Float32);
3043  std::vector<float> projectionWeightsData = GenerateRandomData<float>(tensorInfo16x20.GetNumElements());
3044  armnn::ConstTensor projectionWeights(tensorInfo16x20, projectionWeightsData);
3045 
3046  armnn::TensorInfo tensorInfo16({outputSize}, armnn::DataType::Float32);
3047  std::vector<float> projectionBiasData(outputSize, 0.f);
3048  armnn::ConstTensor projectionBias(tensorInfo16, projectionBiasData);
3049 
3050  std::vector<float> inputLayerNormWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
3051  armnn::ConstTensor inputLayerNormWeights(tensorInfo20, inputLayerNormWeightsData);
3052 
3053  std::vector<float> forgetLayerNormWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
3054  armnn::ConstTensor forgetLayerNormWeights(tensorInfo20, forgetLayerNormWeightsData);
3055 
3056  std::vector<float> cellLayerNormWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
3057  armnn::ConstTensor cellLayerNormWeights(tensorInfo20, cellLayerNormWeightsData);
3058 
3059  std::vector<float> outLayerNormWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
3060  armnn::ConstTensor outLayerNormWeights(tensorInfo20, outLayerNormWeightsData);
3061 
3062  armnn::LstmInputParams params;
3063  params.m_InputToForgetWeights = &inputToForgetWeights;
3064  params.m_InputToCellWeights = &inputToCellWeights;
3065  params.m_InputToOutputWeights = &inputToOutputWeights;
3066  params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
3067  params.m_RecurrentToCellWeights = &recurrentToCellWeights;
3068  params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
3069  params.m_ForgetGateBias = &forgetGateBias;
3070  params.m_CellBias = &cellBias;
3071  params.m_OutputGateBias = &outputGateBias;
3072 
3073  // additional params because: descriptor.m_CifgEnabled = false
3074  params.m_InputToInputWeights = &inputToInputWeights;
3075  params.m_RecurrentToInputWeights = &recurrentToInputWeights;
3076  params.m_CellToInputWeights = &cellToInputWeights;
3077  params.m_InputGateBias = &inputGateBias;
3078 
3079  // additional params because: descriptor.m_ProjectionEnabled = true
3080  params.m_ProjectionWeights = &projectionWeights;
3081  params.m_ProjectionBias = &projectionBias;
3082 
3083  // additional params because: descriptor.m_PeepholeEnabled = true
3084  params.m_CellToForgetWeights = &cellToForgetWeights;
3085  params.m_CellToOutputWeights = &cellToOutputWeights;
3086 
3087  // additional params because: descriptor.m_LayerNormEnabled = true
3088  params.m_InputLayerNormWeights = &inputLayerNormWeights;
3089  params.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
3090  params.m_CellLayerNormWeights = &cellLayerNormWeights;
3091  params.m_OutputLayerNormWeights = &outLayerNormWeights;
3092 
3093  armnn::INetworkPtr network = armnn::INetwork::Create();
3094  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
3095  armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(1);
3096  armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(2);
3097  const std::string layerName("lstm");
3098  armnn::IConnectableLayer* const lstmLayer = network->AddLstmLayer(descriptor, params, layerName.c_str());
3099  armnn::IConnectableLayer* const scratchBuffer = network->AddOutputLayer(0);
3100  armnn::IConnectableLayer* const outputStateOut = network->AddOutputLayer(1);
3101  armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(2);
3102  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(3);
3103 
3104  // connect up
3105  armnn::TensorInfo inputTensorInfo({ batchSize, inputSize }, armnn::DataType::Float32);
3106  armnn::TensorInfo cellStateTensorInfo({ batchSize, numUnits}, armnn::DataType::Float32);
3107  armnn::TensorInfo outputStateTensorInfo({ batchSize, outputSize }, armnn::DataType::Float32);
3108  armnn::TensorInfo lstmTensorInfoScratchBuff({ batchSize, numUnits * 4 }, armnn::DataType::Float32);
3109 
3110  inputLayer->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(0));
3111  inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
3112 
3113  outputStateIn->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(1));
3114  outputStateIn->GetOutputSlot(0).SetTensorInfo(outputStateTensorInfo);
3115 
3116  cellStateIn->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(2));
3117  cellStateIn->GetOutputSlot(0).SetTensorInfo(cellStateTensorInfo);
3118 
3119  lstmLayer->GetOutputSlot(0).Connect(scratchBuffer->GetInputSlot(0));
3120  lstmLayer->GetOutputSlot(0).SetTensorInfo(lstmTensorInfoScratchBuff);
3121 
3122  lstmLayer->GetOutputSlot(1).Connect(outputStateOut->GetInputSlot(0));
3123  lstmLayer->GetOutputSlot(1).SetTensorInfo(outputStateTensorInfo);
3124 
3125  lstmLayer->GetOutputSlot(2).Connect(cellStateOut->GetInputSlot(0));
3126  lstmLayer->GetOutputSlot(2).SetTensorInfo(cellStateTensorInfo);
3127 
3128  lstmLayer->GetOutputSlot(3).Connect(outputLayer->GetInputSlot(0));
3129  lstmLayer->GetOutputSlot(3).SetTensorInfo(outputStateTensorInfo);
3130 
3131  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
3132  BOOST_CHECK(deserializedNetwork);
3133 
3134  VerifyLstmLayer checker(
3135  layerName,
3136  {inputTensorInfo, outputStateTensorInfo, cellStateTensorInfo},
3137  {lstmTensorInfoScratchBuff, outputStateTensorInfo, cellStateTensorInfo, outputStateTensorInfo},
3138  descriptor,
3139  params);
3140  deserializedNetwork->Accept(checker);
3141 }
3142 
3143 BOOST_AUTO_TEST_CASE(EnsureLstmLayersBackwardCompatibility)
3144 {
3145  // The hex data below is a flat buffer containing an LSTM layer with no CIFG, with peephole and projection
3146  // enabled. The data was obtained before the additional layer normalization parameters were added to the
3147  // LSTM serializer, so this test checks that an LSTM model with the old parameter configuration can
3148  // still be loaded.
3149  const std::vector<uint8_t> lstmNoCifgWithPeepholeAndProjectionModel =
3150  {
3151  0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00,
3152  0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x2C, 0x00, 0x00, 0x00, 0x38, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00,
3153  0xDC, 0x29, 0x00, 0x00, 0x38, 0x29, 0x00, 0x00, 0xB4, 0x28, 0x00, 0x00, 0x94, 0x01, 0x00, 0x00, 0x3C, 0x01,
3154  0x00, 0x00, 0xE0, 0x00, 0x00, 0x00, 0x84, 0x00, 0x00, 0x00, 0x28, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
3155  0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x04, 0x00,
3156  0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00, 0x70, 0xD6, 0xFF, 0xFF,
3157  0x00, 0x00, 0x00, 0x0B, 0x04, 0x00, 0x00, 0x00, 0x06, 0xD7, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x88, 0xD7,
3158  0xFF, 0xFF, 0x08, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0xF6, 0xD6, 0xFF, 0xFF, 0x07, 0x00, 0x00, 0x00,
3159  0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00,
3160  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3161  0xE8, 0xD7, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0xC8, 0xD6, 0xFF, 0xFF, 0x00, 0x00,
3162  0x00, 0x0B, 0x04, 0x00, 0x00, 0x00, 0x5E, 0xD7, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0xE0, 0xD7, 0xFF, 0xFF,
3163  0x08, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x4E, 0xD7, 0xFF, 0xFF, 0x06, 0x00, 0x00, 0x00, 0x10, 0x00,
3164  0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3165  0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0xD8,
3166  0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x20, 0xD7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B,
3167  0x04, 0x00, 0x00, 0x00, 0xB6, 0xD7, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x38, 0xD8, 0xFF, 0xFF, 0x08, 0x00,
3168  0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0xA6, 0xD7, 0xFF, 0xFF, 0x05, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,
3169  0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3170  0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x98, 0xD8, 0xFF, 0xFF,
3171  0x03, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x78, 0xD7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B, 0x04, 0x00,
3172  0x00, 0x00, 0x0E, 0xD8, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x16, 0xD8, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00,
3173  0xFA, 0xD7, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00,
3174  0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
3175  0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xEC, 0xD8, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00,
3176  0x00, 0x00, 0x6C, 0xD8, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x23, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00,
3177  0x12, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00, 0x00, 0x00, 0xE0, 0x25, 0x00, 0x00, 0xD0, 0x25,
3178  0x00, 0x00, 0x2C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x26, 0x00, 0x48, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00,
3179  0x10, 0x00, 0x14, 0x00, 0x18, 0x00, 0x1C, 0x00, 0x20, 0x00, 0x24, 0x00, 0x28, 0x00, 0x2C, 0x00, 0x30, 0x00,
3180  0x34, 0x00, 0x38, 0x00, 0x3C, 0x00, 0x40, 0x00, 0x44, 0x00, 0x26, 0x00, 0x00, 0x00, 0xC4, 0x23, 0x00, 0x00,
3181  0xF8, 0x21, 0x00, 0x00, 0x2C, 0x20, 0x00, 0x00, 0xF0, 0x1A, 0x00, 0x00, 0xB4, 0x15, 0x00, 0x00, 0x78, 0x10,
3182  0x00, 0x00, 0xF0, 0x0F, 0x00, 0x00, 0x68, 0x0F, 0x00, 0x00, 0xE0, 0x0E, 0x00, 0x00, 0x14, 0x0D, 0x00, 0x00,
3183  0xD8, 0x07, 0x00, 0x00, 0x50, 0x07, 0x00, 0x00, 0xC8, 0x06, 0x00, 0x00, 0x8C, 0x01, 0x00, 0x00, 0x14, 0x01,
3184  0x00, 0x00, 0x8C, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xEE, 0xD7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03,
3185  0x64, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xFE, 0xD8, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x14, 0x00,
3186  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3187  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3188  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3189  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3190  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x5A, 0xD8, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01,
3191  0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x72, 0xD8,
3192  0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x64, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x82, 0xD9, 0xFF, 0xFF,
3193  0x04, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3194  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3195  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3196  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3197  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xDE, 0xD8,
3198  0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
3199  0x14, 0x00, 0x00, 0x00, 0xF6, 0xD8, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x54, 0x00, 0x00, 0x00, 0x04, 0x00,
3200  0x00, 0x00, 0x06, 0xDA, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3201  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3202  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3203  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3204  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x52, 0xD9, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00,
3205  0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x6A, 0xD9, 0xFF, 0xFF, 0x00, 0x00,
3206  0x00, 0x03, 0x14, 0x05, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x7A, 0xDA, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00,
3207  0x40, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3208  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3209  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3210  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3211  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3212  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3213  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3214  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3215  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3216  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3217  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3218  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3219  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3220  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3221  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3222  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3223  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3224  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3225  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3226  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3227  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3228  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3229  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3230  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3231  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3232  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3233  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3234  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3235  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3236  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3237  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3238  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3239  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3240  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3241  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3242  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3243  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3244  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3245  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3246  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3247  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3248  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3249  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3250  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3251  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3252  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3253  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3254  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3255  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3256  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3257  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3258  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3259  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3260  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3261  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3262  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3263  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3264  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3265  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3266  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3267  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3268  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3269  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3270  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3271  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3272  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3273  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3274  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3275  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3276  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3277  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3278  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x86, 0xDE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00,
3279  0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0xA2, 0xDE,
3280  0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x64, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xB2, 0xDF, 0xFF, 0xFF,
3281  0x04, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3282  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3283  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3284  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3285  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0xDF,
3286  0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
3287  0x14, 0x00, 0x00, 0x00, 0x26, 0xDF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x64, 0x00, 0x00, 0x00, 0x04, 0x00,
3288  0x00, 0x00, 0x36, 0xE0, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3289  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3290  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3291  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3292  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3293  0x00, 0x00, 0x00, 0x00, 0x92, 0xDF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
3294  0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0xAA, 0xDF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03,
3295  0x14, 0x05, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xBA, 0xE0, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x40, 0x01,
3296  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3297  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3298  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3299  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3300  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3301  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3302  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3303  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3304  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3305  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3306  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3307  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3308  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3309  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3310  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3311  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3312  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3313  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3314  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3315  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3316  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3317  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3318  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3319  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3320  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3321  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3322  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3323  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3324  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3325  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3326  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3327  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3328  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3329  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3330  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3331  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3332  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3333  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3334  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3335  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3336  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3337  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3338  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3339  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3340  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3341  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3342  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3343  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3344  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3345  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3346  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3347  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3348  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3349  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3350  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3351  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3352  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3353  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3354  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3355  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3356  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3357  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3358  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3359  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3360  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3361  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3362  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3363  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3364  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3365  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3366  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3367  0x00, 0x00, 0x00, 0x00, 0xC6, 0xE4, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
3368  0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0xE2, 0xE4, 0xFF, 0xFF,
3369  0x00, 0x00, 0x00, 0x03, 0xA4, 0x01, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xF2, 0xE5, 0xFF, 0xFF, 0x04, 0x00,
3370  0x00, 0x00, 0x64, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3371  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3372  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3373  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3374  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3375  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3376  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3377  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3378  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3379  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3380  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3381  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3382  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3383  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3384  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3385  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3386  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3387  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3388  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3389  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3390  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3391  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3392  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x8E, 0xE6, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01,
3393  0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x05, 0x00,
3394  0x00, 0x00, 0xAA, 0xE6, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x64, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
3395  0xBA, 0xE7, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3396  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3397  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3398  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3399  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3400  0x00, 0x00, 0x16, 0xE7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3401  0x01, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x2E, 0xE7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x64, 0x00,
3402  0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x3E, 0xE8, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00,
3403  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3404  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3405  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3406  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3407  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x9A, 0xE7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00,
3408  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0xB2, 0xE7, 0xFF, 0xFF,
3409  0x00, 0x00, 0x00, 0x03, 0x64, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xC2, 0xE8, 0xFF, 0xFF, 0x04, 0x00,
3410  0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3411  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3412  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3413  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3414  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x1E, 0xE8, 0xFF, 0xFF,
3415  0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x14, 0x00,
3416  0x00, 0x00, 0x36, 0xE8, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x14, 0x05, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
3417  0x46, 0xE9, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x40, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3418  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3419  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3420  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3421  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3422  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3423  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3424  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3425  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3426  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3427  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3428  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3429  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3430  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3431  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3432  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3433  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3434  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3435  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3436  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3437  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3438  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3439  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3440  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3441  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3442  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3443  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3444  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3445  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3446  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3447  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3448  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3449  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3450  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3451  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3452  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3453  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3454  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3455  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3456  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3457  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3458  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3459  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3460  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3461  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3462  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3463  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3464  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3465  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3466  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3467  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3468  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3469  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3470  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3471  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3472  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3473  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3474  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3475  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3476  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3477  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3478  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3479  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3480  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3481  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3482  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3483  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3484  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3485  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3486  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3487  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3488  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x52, 0xED, 0xFF, 0xFF,
3489  0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00,
3490  0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x6E, 0xED, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x14, 0x05, 0x00, 0x00,
3491  0x04, 0x00, 0x00, 0x00, 0x7E, 0xEE, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x40, 0x01, 0x00, 0x00, 0x00, 0x00,
3492  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3493  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3494  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3495  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3496  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3497  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3498  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3499  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3500  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3501  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3502  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3503  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3504  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3505  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3506  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3507  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3508  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3509  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3510  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3511  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3512  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3513  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3514  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3515  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3516  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3517  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3518  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3519  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3520  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3521  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3522  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3523  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3524  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3525  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3526  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3527  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3528  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3529  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3530  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3531  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3532  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3533  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3534  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3535  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3536  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3537  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3538  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3539  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3540  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3541  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3542  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3543  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3544  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3545  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3546  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3547  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3548  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3549  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3550  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3551  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3552  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3553  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3554  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3555  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3556  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3557  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3558  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3559  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3560  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3561  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3562  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3563  0x8A, 0xF2, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00,
3564  0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0xA6, 0xF2, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03,
3565  0x14, 0x05, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xB6, 0xF3, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x40, 0x01,
3566  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3567  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3568  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3569  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3570  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3571  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3572  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3573  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3574  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3575  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3576  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3577  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3578  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3579  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3580  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3581  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3582  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3583  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3584  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3585  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3586  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3587  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3588  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3589  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3590  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3591  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3592  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3593  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3594  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3595  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3596  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3597  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3598  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3599  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3600  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3601  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3602  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3603  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3604  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3605  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3606  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3607  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3608  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3609  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3610  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3611  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3612  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3613  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3614  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3615  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3616  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3617  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3618  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3619  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3620  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3621  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3622  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3623  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3624  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3625  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3626  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3627  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3628  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3629  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3630  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3631  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3632  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3633  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3634  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3635  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3636  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3637  0x00, 0x00, 0x00, 0x00, 0xC2, 0xF7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
3638  0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0xDE, 0xF7, 0xFF, 0xFF,
3639  0x00, 0x00, 0x00, 0x03, 0xA4, 0x01, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xEE, 0xF8, 0xFF, 0xFF, 0x04, 0x00,
3640  0x00, 0x00, 0x64, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3641  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3642  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3643  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3644  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3645  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3646  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3647  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3648  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3649  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3650  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3651  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3652  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3653  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3654  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3655  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3656  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3657  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3658  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3659  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3660  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3661  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3662  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x8A, 0xF9, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01,
3663  0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x05, 0x00,
3664  0x00, 0x00, 0xA6, 0xF9, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0xA4, 0x01, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
3665  0xB6, 0xFA, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x64, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3666  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3667  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3668  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3669  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3670  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3671  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3672  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3673  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3674  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3675  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3676  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3677  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3678  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3679  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3680  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3681  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3682  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3683  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3684  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3685  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3686  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3687  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x52, 0xFB,
3688  0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
3689  0x14, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x6E, 0xFB, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0xA4, 0x01,
3690  0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x7E, 0xFC, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x64, 0x00, 0x00, 0x00,
3691  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3692  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3693  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3694  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3695  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3696  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3697  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3698  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3699  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3700  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3701  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3702  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3703  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3704  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3705  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3706  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3707  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3708  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3709  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3710  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3711  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3712  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3713  0x00, 0x00, 0x00, 0x00, 0x1A, 0xFD, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
3714  0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x10, 0x00, 0x0C, 0x00,
3715  0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x05, 0x00, 0x06, 0x00, 0x07, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00,
3716  0x01, 0x01, 0x04, 0x00, 0x00, 0x00, 0x2E, 0xFE, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,
3717  0x22, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x6C, 0x73,
3718  0x74, 0x6D, 0x00, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0xEC, 0x00, 0x00, 0x00, 0xD0, 0x00, 0x00, 0x00,
3719  0xB4, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x88, 0x00, 0x00, 0x00, 0x5C, 0x00, 0x00, 0x00, 0x30, 0x00,
3720  0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x14, 0xFF, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
3721  0xA6, 0xFD, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00,
3722  0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x3C, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00,
3723  0x04, 0x00, 0x00, 0x00, 0xCE, 0xFD, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
3724  0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x64, 0xFF, 0xFF, 0xFF,
3725  0x01, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xF6, 0xFD, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00,
3726  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,
3727  0xB4, 0xFE, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x1A, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00,
3728  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x50, 0x00, 0x00, 0x00,
3729  0xF0, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00,
3730  0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
3731  0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
3732  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xE8, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x09, 0x04, 0x00, 0x00, 0x00,
3733  0x7E, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00,
3734  0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x76, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00,
3735  0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00,
3736  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
3737  0x68, 0xFF, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0xCE, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00,
3738  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,
3739  0x08, 0x00, 0x0E, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x0C, 0x00,
3740  0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x08, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00,
3741  0x08, 0x00, 0x0E, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x01, 0x00,
3742  0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x14, 0x00,
3743  0x0E, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00,
3744  0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3745  0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00,
3746  0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x6E, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00,
3747  0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x08, 0x00,
3748  0x0C, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x04, 0x00, 0x00, 0x00,
3749  0xF6, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x0A, 0x00, 0x04, 0x00, 0x06, 0x00,
3750  0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x14, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00,
3751  0x0C, 0x00, 0x10, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00,
3752  0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3753  0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00,
3754  0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x08, 0x00, 0x07, 0x00, 0x0C, 0x00,
3755  0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00,
3756  0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x00
3757  };
3758 
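// Deserialize the stored byte stream above and verify that the resulting network contains an
// LSTM layer whose name, connections, descriptor and weights match the values used to build it.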
3759  armnn::INetworkPtr deserializedNetwork =
3760  DeserializeNetwork(std::string(lstmNoCifgWithPeepholeAndProjectionModel.begin(),
3761  lstmNoCifgWithPeepholeAndProjectionModel.end()));
3762 
3763  BOOST_CHECK(deserializedNetwork);
3764 
3765  // Generating the same model parameters which were used to serialize the model (layer norm is not specified)
3766  armnn::LstmDescriptor descriptor;
3767  descriptor.m_ActivationFunc = 4; // 4 corresponds to tanh in the LSTM activation-function enum
3768  descriptor.m_ClippingThresProj = 0.0f;
3769  descriptor.m_ClippingThresCell = 0.0f;
3770  descriptor.m_CifgEnabled = false;
3771  descriptor.m_ProjectionEnabled = true;
3772  descriptor.m_PeepholeEnabled = true;
3773 
3774  const uint32_t batchSize = 2u;
3775  const uint32_t inputSize = 5u;
3776  const uint32_t numUnits = 20u;
3777  const uint32_t outputSize = 16u;
3778 
3779  armnn::TensorInfo tensorInfo20x5({numUnits, inputSize}, armnn::DataType::Float32);
3780  std::vector<float> inputToInputWeightsData(tensorInfo20x5.GetNumElements(), 0.0f);
3781  armnn::ConstTensor inputToInputWeights(tensorInfo20x5, inputToInputWeightsData);
3782 
3783  std::vector<float> inputToForgetWeightsData(tensorInfo20x5.GetNumElements(), 0.0f);
3784  armnn::ConstTensor inputToForgetWeights(tensorInfo20x5, inputToForgetWeightsData);
3785 
3786  std::vector<float> inputToCellWeightsData(tensorInfo20x5.GetNumElements(), 0.0f);
3787  armnn::ConstTensor inputToCellWeights(tensorInfo20x5, inputToCellWeightsData);
3788 
3789  std::vector<float> inputToOutputWeightsData(tensorInfo20x5.GetNumElements(), 0.0f);
3790  armnn::ConstTensor inputToOutputWeights(tensorInfo20x5, inputToOutputWeightsData);
3791 
3792  armnn::TensorInfo tensorInfo20({numUnits}, armnn::DataType::Float32);
3793  std::vector<float> inputGateBiasData(tensorInfo20.GetNumElements(), 0.0f);
3794  armnn::ConstTensor inputGateBias(tensorInfo20, inputGateBiasData);
3795 
3796  std::vector<float> forgetGateBiasData(tensorInfo20.GetNumElements(), 0.0f);
3797  armnn::ConstTensor forgetGateBias(tensorInfo20, forgetGateBiasData);
3798 
3799  std::vector<float> cellBiasData(tensorInfo20.GetNumElements(), 0.0f);
3800  armnn::ConstTensor cellBias(tensorInfo20, cellBiasData);
3801 
3802  std::vector<float> outputGateBiasData(tensorInfo20.GetNumElements(), 0.0f);
3803  armnn::ConstTensor outputGateBias(tensorInfo20, outputGateBiasData);
3804 
3805  armnn::TensorInfo tensorInfo20x16({numUnits, outputSize}, armnn::DataType::Float32);
3806  std::vector<float> recurrentToInputWeightsData(tensorInfo20x16.GetNumElements(), 0.0f);
3807  armnn::ConstTensor recurrentToInputWeights(tensorInfo20x16, recurrentToInputWeightsData);
3808 
3809  std::vector<float> recurrentToForgetWeightsData(tensorInfo20x16.GetNumElements(), 0.0f);
3810  armnn::ConstTensor recurrentToForgetWeights(tensorInfo20x16, recurrentToForgetWeightsData);
3811 
3812  std::vector<float> recurrentToCellWeightsData(tensorInfo20x16.GetNumElements(), 0.0f);
3813  armnn::ConstTensor recurrentToCellWeights(tensorInfo20x16, recurrentToCellWeightsData);
3814 
3815  std::vector<float> recurrentToOutputWeightsData(tensorInfo20x16.GetNumElements(), 0.0f);
3816  armnn::ConstTensor recurrentToOutputWeights(tensorInfo20x16, recurrentToOutputWeightsData);
3817 
3818  std::vector<float> cellToInputWeightsData(tensorInfo20.GetNumElements(), 0.0f);
3819  armnn::ConstTensor cellToInputWeights(tensorInfo20, cellToInputWeightsData);
3820 
3821  std::vector<float> cellToForgetWeightsData(tensorInfo20.GetNumElements(), 0.0f);
3822  armnn::ConstTensor cellToForgetWeights(tensorInfo20, cellToForgetWeightsData);
3823 
3824  std::vector<float> cellToOutputWeightsData(tensorInfo20.GetNumElements(), 0.0f);
3825  armnn::ConstTensor cellToOutputWeights(tensorInfo20, cellToOutputWeightsData);
3826 
3827  armnn::TensorInfo tensorInfo16x20({outputSize, numUnits}, armnn::DataType::Float32);
3828  std::vector<float> projectionWeightsData(tensorInfo16x20.GetNumElements(), 0.0f);
3829  armnn::ConstTensor projectionWeights(tensorInfo16x20, projectionWeightsData);
3830 
3831  armnn::TensorInfo tensorInfo16({outputSize}, armnn::DataType::Float32);
3832  std::vector<float> projectionBiasData(outputSize, 0.0f);
3833  armnn::ConstTensor projectionBias(tensorInfo16, projectionBiasData);
3834 
3835  armnn::LstmInputParams params;
3836  params.m_InputToForgetWeights = &inputToForgetWeights;
3837  params.m_InputToCellWeights = &inputToCellWeights;
3838  params.m_InputToOutputWeights = &inputToOutputWeights;
3839  params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
3840  params.m_RecurrentToCellWeights = &recurrentToCellWeights;
3841  params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
3842  params.m_ForgetGateBias = &forgetGateBias;
3843  params.m_CellBias = &cellBias;
3844  params.m_OutputGateBias = &outputGateBias;
3845 
3846  // additional params because: descriptor.m_CifgEnabled = false
3847  params.m_InputToInputWeights = &inputToInputWeights;
3848  params.m_RecurrentToInputWeights = &recurrentToInputWeights;
3849  params.m_CellToInputWeights = &cellToInputWeights;
3850  params.m_InputGateBias = &inputGateBias;
3851 
3852  // additional params because: descriptor.m_ProjectionEnabled = true
3853  params.m_ProjectionWeights = &projectionWeights;
3854  params.m_ProjectionBias = &projectionBias;
3855 
3856  // additional params because: descriptor.m_PeepholeEnabled = true
3857  params.m_CellToForgetWeights = &cellToForgetWeights;
3858  params.m_CellToOutputWeights = &cellToOutputWeights;
3859 
3860  const std::string layerName("lstm");
3861  armnn::TensorInfo inputTensorInfo({ batchSize, inputSize }, armnn::DataType::Float32);
3862  armnn::TensorInfo cellStateTensorInfo({ batchSize, numUnits}, armnn::DataType::Float32);
3863  armnn::TensorInfo outputStateTensorInfo({ batchSize, outputSize }, armnn::DataType::Float32);
3864  armnn::TensorInfo lstmTensorInfoScratchBuff({ batchSize, numUnits * 4 }, armnn::DataType::Float32);
3865 
3866  VerifyLstmLayer checker(
3867  layerName,
3868  {inputTensorInfo, outputStateTensorInfo, cellStateTensorInfo},
3869  {lstmTensorInfoScratchBuff, outputStateTensorInfo, cellStateTensorInfo, outputStateTensorInfo},
3870  descriptor,
3871  params);
3872  deserializedNetwork->Accept(checker);
3873 }
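// Visitor for the quantized LSTM test below: checks the deserialized layer's name and connections,
// then compares every tensor in its QuantizedLstmInputParams against the expected constants.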
3874 class VerifyQuantizedLstmLayer : public LayerVerifierBase
3875 {
3876 
3877 public:
3878  VerifyQuantizedLstmLayer(const std::string& layerName,
3879  const std::vector<armnn::TensorInfo>& inputInfos,
3880  const std::vector<armnn::TensorInfo>& outputInfos,
3881  const armnn::QuantizedLstmInputParams& inputParams)
3882  : LayerVerifierBase(layerName, inputInfos, outputInfos), m_InputParams(inputParams) {}
3883 
3884  void VisitQuantizedLstmLayer(const armnn::IConnectableLayer* layer,
3885  const armnn::QuantizedLstmInputParams& params,
3886  const char* name)
3887  {
3888  VerifyNameAndConnections(layer, name);
3889  VerifyInputParameters(params);
3890  }
3891 
3892 protected:
3893  void VerifyInputParameters(const armnn::QuantizedLstmInputParams& params)
3894  {
3895  VerifyConstTensors("m_InputToInputWeights",
3896  m_InputParams.m_InputToInputWeights, params.m_InputToInputWeights);
3897  VerifyConstTensors("m_InputToForgetWeights",
3898  m_InputParams.m_InputToForgetWeights, params.m_InputToForgetWeights);
3899  VerifyConstTensors("m_InputToCellWeights",
3900  m_InputParams.m_InputToCellWeights, params.m_InputToCellWeights);
3901  VerifyConstTensors("m_InputToOutputWeights",
3902  m_InputParams.m_InputToOutputWeights, params.m_InputToOutputWeights);
3903  VerifyConstTensors("m_RecurrentToInputWeights",
3904  m_InputParams.m_RecurrentToInputWeights, params.m_RecurrentToInputWeights);
3905  VerifyConstTensors("m_RecurrentToForgetWeights",
3906  m_InputParams.m_RecurrentToForgetWeights, params.m_RecurrentToForgetWeights);
3907  VerifyConstTensors("m_RecurrentToCellWeights",
3908  m_InputParams.m_RecurrentToCellWeights, params.m_RecurrentToCellWeights);
3909  VerifyConstTensors("m_RecurrentToOutputWeights",
3910  m_InputParams.m_RecurrentToOutputWeights, params.m_RecurrentToOutputWeights);
3911  VerifyConstTensors("m_InputGateBias",
3912  m_InputParams.m_InputGateBias, params.m_InputGateBias);
3913  VerifyConstTensors("m_ForgetGateBias",
3914  m_InputParams.m_ForgetGateBias, params.m_ForgetGateBias);
3915  VerifyConstTensors("m_CellBias",
3916  m_InputParams.m_CellBias, params.m_CellBias);
3917  VerifyConstTensors("m_OutputGateBias",
3918  m_InputParams.m_OutputGateBias, params.m_OutputGateBias);
3919  }
3920 
3921 private:
3922  armnn::QuantizedLstmInputParams m_InputParams;
3923 };
3924 
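// Serialize/deserialize round-trip test for a QuantizedLstm layer. The constant tensors set up
// below provide the quantized weights and biases that VerifyQuantizedLstmLayer checks against.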
3925 BOOST_AUTO_TEST_CASE(SerializeDeserializeQuantizedLstm)
3926 {
3927  const uint32_t batchSize = 1;
3928  const uint32_t inputSize = 2;
3929  const uint32_t numUnits = 4;
3930  const uint32_t outputSize = numUnits;
3931 
3932  // Scale/Offset for input/output, cellState In/Out, weights, bias
3933  float inputOutputScale = 0.0078125f;
3934  int32_t inputOutputOffset = 128;
3935 
3936  float cellStateScale = 0.00048828125f;
3937  int32_t cellStateOffset = 0;
3938 
3939  float weightsScale = 0.00408021f;
3940  int32_t weightsOffset = 100;
3941 
3942  float biasScale = 3.1876640625e-05f;
3943  int32_t biasOffset = 0;
3944 
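// The weight tensors below hold 8-bit quantized values (weightsScale/weightsOffset),
// while the gate bias tensors hold 32-bit integer values (biasScale/biasOffset).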
3945  // The shape of weight data is {outputSize, inputSize} = {4, 2}
3946  armnn::TensorShape inputToInputWeightsShape = {4, 2};
3947  std::vector<uint8_t> inputToInputWeightsData = {1, 2, 3, 4, 5, 6, 7, 8};
3948  armnn::TensorInfo inputToInputWeightsInfo(inputToInputWeightsShape,
3949  armnn::DataType::QAsymmU8,
3950  weightsScale,
3951  weightsOffset);
3952  armnn::ConstTensor inputToInputWeights(inputToInputWeightsInfo, inputToInputWeightsData);
3953 
3954  armnn::TensorShape inputToForgetWeightsShape = {4, 2};
3955  std::vector<uint8_t> inputToForgetWeightsData = {1, 2, 3, 4, 5, 6, 7, 8};
3956  armnn::TensorInfo inputToForgetWeightsInfo(inputToForgetWeightsShape,
3957  armnn::DataType::QAsymmU8,
3958  weightsScale,
3959  weightsOffset);
3960  armnn::ConstTensor inputToForgetWeights(inputToForgetWeightsInfo, inputToForgetWeightsData);
3961 
3962  armnn::TensorShape inputToCellWeightsShape = {4, 2};
3963  std::vector<uint8_t> inputToCellWeightsData = {1, 2, 3, 4, 5, 6, 7, 8};
3964  armnn::TensorInfo inputToCellWeightsInfo(inputToCellWeightsShape,
3966  weightsScale,
3967  weightsOffset);
3968  armnn::ConstTensor inputToCellWeights(inputToCellWeightsInfo, inputToCellWeightsData);
3969 
3970  armnn::TensorShape inputToOutputWeightsShape = {4, 2};
3971  std::vector<uint8_t> inputToOutputWeightsData = {1, 2, 3, 4, 5, 6, 7, 8};
3972  armnn::TensorInfo inputToOutputWeightsInfo(inputToOutputWeightsShape,
3973  armnn::DataType::QAsymmU8,
3974  weightsScale,
3975  weightsOffset);
3976  armnn::ConstTensor inputToOutputWeights(inputToOutputWeightsInfo, inputToOutputWeightsData);
3977 
3978  // The shape of recurrent weight data is {outputSize, outputSize} = {4, 4}
3979  armnn::TensorShape recurrentToInputWeightsShape = {4, 4};
3980  std::vector<uint8_t> recurrentToInputWeightsData = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};
3981  armnn::TensorInfo recurrentToInputWeightsInfo(recurrentToInputWeightsShape,
3982  armnn::DataType::QAsymmU8,
3983  weightsScale,
3984  weightsOffset);
3985  armnn::ConstTensor recurrentToInputWeights(recurrentToInputWeightsInfo, recurrentToInputWeightsData);
3986 
3987  armnn::TensorShape recurrentToForgetWeightsShape = {4, 4};
3988  std::vector<uint8_t> recurrentToForgetWeightsData = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};
3989  armnn::TensorInfo recurrentToForgetWeightsInfo(recurrentToForgetWeightsShape,
3990  armnn::DataType::QAsymmU8,
3991  weightsScale,
3992  weightsOffset);
3993  armnn::ConstTensor recurrentToForgetWeights(recurrentToForgetWeightsInfo, recurrentToForgetWeightsData);
3994 
3995  armnn::TensorShape recurrentToCellWeightsShape = {4, 4};
3996  std::vector<uint8_t> recurrentToCellWeightsData = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};
3997  armnn::TensorInfo recurrentToCellWeightsInfo(recurrentToCellWeightsShape,
3998  armnn::DataType::QAsymmU8,
3999  weightsScale,
4000  weightsOffset);
4001  armnn::ConstTensor recurrentToCellWeights(recurrentToCellWeightsInfo, recurrentToCellWeightsData);
4002 
4003  armnn::TensorShape recurrentToOutputWeightsShape = {4, 4};
4004  std::vector<uint8_t> recurrentToOutputWeightsData = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};
4005  armnn::TensorInfo recurrentToOutputWeightsInfo(recurrentToOutputWeightsShape,
4006  armnn::DataType::QAsymmU8,
4007  weightsScale,
4008  weightsOffset);
4009  armnn::ConstTensor recurrentToOutputWeights(recurrentToOutputWeightsInfo, recurrentToOutputWeightsData);
4010 
4011  // The shape of bias data is {outputSize} = {4}
4012  armnn::TensorShape inputGateBiasShape = {4};
4013  std::vector<int32_t> inputGateBiasData = {1, 2, 3, 4};
4014  armnn::TensorInfo inputGateBiasInfo(inputGateBiasShape,
4015  armnn::DataType::Signed32,
4016  biasScale,
4017  biasOffset);
4018  armnn::ConstTensor inputGateBias(inputGateBiasInfo, inputGateBiasData);
4019 
4020  armnn::TensorShape forgetGateBiasShape = {4};
4021  std::vector<int32_t> forgetGateBiasData = {1, 2, 3, 4};
4022  armnn::TensorInfo forgetGateBiasInfo(forgetGateBiasShape,
4023  armnn::DataType::Signed32,
4024  biasScale,
4025  biasOffset);
4026  armnn::ConstTensor forgetGateBias(forgetGateBiasInfo, forgetGateBiasData);
4027 
4028  armnn::TensorShape cellBiasShape = {4};
4029  std::vector<int32_t> cellBiasData = {1, 2, 3, 4};
4030  armnn::TensorInfo cellBiasInfo(cellBiasShape,
4031  armnn::DataType::Signed32,
4032  biasScale,
4033  biasOffset);
4034  armnn::ConstTensor cellBias(cellBiasInfo, cellBiasData);
4035 
4036  armnn::TensorShape outputGateBiasShape = {4};
4037  std::vector<int32_t> outputGateBiasData = {1, 2, 3, 4};
4038  armnn::TensorInfo outputGateBiasInfo(outputGateBiasShape,
4039  armnn::DataType::Signed32,
4040  biasScale,
4041  biasOffset);
4042  armnn::ConstTensor outputGateBias(outputGateBiasInfo, outputGateBiasData);
4043 
4043 
4044  armnn::QuantizedLstmInputParams params;
4045  params.m_InputToInputWeights = &inputToInputWeights;
4046  params.m_InputToForgetWeights = &inputToForgetWeights;
4047  params.m_InputToCellWeights = &inputToCellWeights;
4048  params.m_InputToOutputWeights = &inputToOutputWeights;
4049  params.m_RecurrentToInputWeights = &recurrentToInputWeights;
4050  params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
4051  params.m_RecurrentToCellWeights = &recurrentToCellWeights;
4052  params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
4053  params.m_InputGateBias = &inputGateBias;
4054  params.m_ForgetGateBias = &forgetGateBias;
4055  params.m_CellBias = &cellBias;
4056  params.m_OutputGateBias = &outputGateBias;
4057 
4058  armnn::INetworkPtr network = armnn::INetwork::Create();
4059  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
4060  armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(1);
4061  armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(2);
4062  const std::string layerName("QuantizedLstm");
4063  armnn::IConnectableLayer* const quantizedLstmLayer = network->AddQuantizedLstmLayer(params, layerName.c_str());
4064  armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(0);
4065  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(1);
4066 
4067  // Connect up
4068  armnn::TensorInfo inputTensorInfo({ batchSize, inputSize },
4069  armnn::DataType::QAsymmU8,
4070  inputOutputScale,
4071  inputOutputOffset);
4072  armnn::TensorInfo cellStateTensorInfo({ batchSize, numUnits },
4073  armnn::DataType::QSymmS16,
4074  cellStateScale,
4075  cellStateOffset);
4076  armnn::TensorInfo outputStateTensorInfo({ batchSize, outputSize },
4077  armnn::DataType::QAsymmU8,
4078  inputOutputScale,
4079  inputOutputOffset);
4080 
4081  inputLayer->GetOutputSlot(0).Connect(quantizedLstmLayer->GetInputSlot(0));
4082  inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
4083 
4084  cellStateIn->GetOutputSlot(0).Connect(quantizedLstmLayer->GetInputSlot(1));
4085  cellStateIn->GetOutputSlot(0).SetTensorInfo(cellStateTensorInfo);
4086 
4087  outputStateIn->GetOutputSlot(0).Connect(quantizedLstmLayer->GetInputSlot(2));
4088  outputStateIn->GetOutputSlot(0).SetTensorInfo(outputStateTensorInfo);
4089 
4090  quantizedLstmLayer->GetOutputSlot(0).Connect(cellStateOut->GetInputSlot(0));
4091  quantizedLstmLayer->GetOutputSlot(0).SetTensorInfo(cellStateTensorInfo);
4092 
4093  quantizedLstmLayer->GetOutputSlot(1).Connect(outputLayer->GetInputSlot(0));
4094  quantizedLstmLayer->GetOutputSlot(1).SetTensorInfo(outputStateTensorInfo);
4095 
4096  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
4097  BOOST_CHECK(deserializedNetwork);
4098 
4099  VerifyQuantizedLstmLayer checker(layerName,
4100  {inputTensorInfo, cellStateTensorInfo, outputStateTensorInfo},
4101  {cellStateTensorInfo, outputStateTensorInfo},
4102  params);
4103 
4104  deserializedNetwork->Accept(checker);
4105 }
4106 
4107 BOOST_AUTO_TEST_SUITE_END()