ArmNN
 20.08
SerializerTests.cpp
Go to the documentation of this file.
1 //
2 // Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
3 // SPDX-License-Identifier: MIT
4 //
5 
#include "../Serializer.hpp"

#include <armnn/Descriptors.hpp>
#include <armnn/INetwork.hpp>
#include <armnn/TypesUtils.hpp>
#include <armnn/LstmParams.hpp>

#include <armnnDeserializer/IDeserializer.hpp>

#include <algorithm>
#include <cstdint>
#include <random>
#include <sstream>
#include <string>
#include <vector>

#include <boost/test/unit_test.hpp>

using armnnDeserializer::IDeserializer;

22 namespace
23 {
24 
// Generates a "<name>LayerVerifier" class for layer types that carry no
// descriptor. The generated Visit<name>Layer override only checks the
// layer's name, connections and tensor infos (VerifyNameAndConnections).
// Note: comments cannot be placed inside the macro body itself — a '//'
// comment would swallow the line-continuation backslash.
#define DECLARE_LAYER_VERIFIER_CLASS(name) \
class name##LayerVerifier : public LayerVerifierBase \
{ \
public: \
    name##LayerVerifier(const std::string& layerName, \
                        const std::vector<armnn::TensorInfo>& inputInfos, \
                        const std::vector<armnn::TensorInfo>& outputInfos) \
        : LayerVerifierBase(layerName, inputInfos, outputInfos) {} \
\
    void Visit##name##Layer(const armnn::IConnectableLayer* layer, const char* name) override \
    { \
        VerifyNameAndConnections(layer, name); \
    } \
};
39 
// Generates a "<name>LayerVerifier" for layer types that carry an
// armnn::<name>Descriptor. In addition to name/connection checks, the
// generated visitor compares the visited layer's descriptor against the
// expected one (LayerVerifierBaseWithDescriptor::VerifyDescriptor).
#define DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(name) \
class name##LayerVerifier : public LayerVerifierBaseWithDescriptor<armnn::name##Descriptor> \
{ \
public: \
    name##LayerVerifier(const std::string& layerName, \
                        const std::vector<armnn::TensorInfo>& inputInfos, \
                        const std::vector<armnn::TensorInfo>& outputInfos, \
                        const armnn::name##Descriptor& descriptor) \
        : LayerVerifierBaseWithDescriptor<armnn::name##Descriptor>( \
            layerName, inputInfos, outputInfos, descriptor) {} \
\
    void Visit##name##Layer(const armnn::IConnectableLayer* layer, \
                            const armnn::name##Descriptor& descriptor, \
                            const char* name) override \
    { \
        VerifyNameAndConnections(layer, name); \
        VerifyDescriptor(descriptor); \
    } \
};
59 
60 struct DefaultLayerVerifierPolicy
61 {
62  static void Apply(const std::string)
63  {
64  BOOST_TEST_MESSAGE("Unexpected layer found in network");
65  BOOST_TEST(false);
66  }
67 };
68 
// Base class for the per-layer verifiers used throughout these tests.
// A verifier visits a deserialized network and checks that a layer's name,
// connections and the tensor infos on both sides of it survived the
// serialize -> deserialize round trip. Unhandled layer types fall through
// to DefaultLayerVerifierPolicy and fail the test.
class LayerVerifierBase : public armnn::LayerVisitorBase<DefaultLayerVerifierPolicy>
{
public:
    // layerName/inputInfos/outputInfos: the expected values for the single
    // layer under test in the deserialized network.
    LayerVerifierBase(const std::string& layerName,
                      const std::vector<armnn::TensorInfo>& inputInfos,
                      const std::vector<armnn::TensorInfo>& outputInfos)
    : m_LayerName(layerName)
    , m_InputTensorInfos(inputInfos)
    , m_OutputTensorInfos(outputInfos) {}

    // Input/Output layers exist in every test network; accept them silently
    // instead of triggering the default (failing) policy.
    void VisitInputLayer(const armnn::IConnectableLayer*, armnn::LayerBindingId, const char*) override {}

    void VisitOutputLayer(const armnn::IConnectableLayer*, armnn::LayerBindingId, const char*) override {}

protected:
    // Checks the layer's name, its slot counts, and — for every input and
    // output slot — the connected tensor's shape, data type and
    // quantization parameters against the expected infos.
    void VerifyNameAndConnections(const armnn::IConnectableLayer* layer, const char* name)
    {
        BOOST_TEST(name == m_LayerName.c_str());

        BOOST_TEST(layer->GetNumInputSlots() == m_InputTensorInfos.size());
        BOOST_TEST(layer->GetNumOutputSlots() == m_OutputTensorInfos.size());

        for (unsigned int i = 0; i < m_InputTensorInfos.size(); i++)
        {
            // Each input slot must be fed by some output slot of a
            // predecessor layer; its tensor info is what the layer sees.
            const armnn::IOutputSlot* connectedOutput = layer->GetInputSlot(i).GetConnection();
            BOOST_CHECK(connectedOutput);

            const armnn::TensorInfo& connectedInfo = connectedOutput->GetTensorInfo();
            BOOST_TEST(connectedInfo.GetShape() == m_InputTensorInfos[i].GetShape());
            BOOST_TEST(
                GetDataTypeName(connectedInfo.GetDataType()) == GetDataTypeName(m_InputTensorInfos[i].GetDataType()));

            BOOST_TEST(connectedInfo.GetQuantizationScale() == m_InputTensorInfos[i].GetQuantizationScale());
            BOOST_TEST(connectedInfo.GetQuantizationOffset() == m_InputTensorInfos[i].GetQuantizationOffset());
        }

        for (unsigned int i = 0; i < m_OutputTensorInfos.size(); i++)
        {
            const armnn::TensorInfo& outputInfo = layer->GetOutputSlot(i).GetTensorInfo();
            BOOST_TEST(outputInfo.GetShape() == m_OutputTensorInfos[i].GetShape());
            BOOST_TEST(
                GetDataTypeName(outputInfo.GetDataType()) == GetDataTypeName(m_OutputTensorInfos[i].GetDataType()));

            BOOST_TEST(outputInfo.GetQuantizationScale() == m_OutputTensorInfos[i].GetQuantizationScale());
            BOOST_TEST(outputInfo.GetQuantizationOffset() == m_OutputTensorInfos[i].GetQuantizationOffset());
        }
    }

    // Compares an optional pair of constant tensors: either both must be
    // absent, or both present with matching shape/type and byte-identical
    // payloads. tensorName is only used to label failure messages.
    void VerifyConstTensors(const std::string& tensorName,
                            const armnn::ConstTensor* expectedPtr,
                            const armnn::ConstTensor* actualPtr)
    {
        if (expectedPtr == nullptr)
        {
            BOOST_CHECK_MESSAGE(actualPtr == nullptr, tensorName + " should not exist");
        }
        else
        {
            BOOST_CHECK_MESSAGE(actualPtr != nullptr, tensorName + " should have been set");
            if (actualPtr != nullptr)
            {
                const armnn::TensorInfo& expectedInfo = expectedPtr->GetInfo();
                const armnn::TensorInfo& actualInfo = actualPtr->GetInfo();

                BOOST_CHECK_MESSAGE(expectedInfo.GetShape() == actualInfo.GetShape(),
                                    tensorName + " shapes don't match");
                BOOST_CHECK_MESSAGE(
                    GetDataTypeName(expectedInfo.GetDataType()) == GetDataTypeName(actualInfo.GetDataType()),
                    tensorName + " data types don't match");

                BOOST_CHECK_MESSAGE(expectedPtr->GetNumBytes() == actualPtr->GetNumBytes(),
                                    tensorName + " (GetNumBytes) data sizes do not match");
                if (expectedPtr->GetNumBytes() == actualPtr->GetNumBytes())
                {
                    //check the data is identical
                    const char* expectedData = static_cast<const char*>(expectedPtr->GetMemoryArea());
                    const char* actualData = static_cast<const char*>(actualPtr->GetMemoryArea());
                    bool same = true;
                    for (unsigned int i = 0; i < expectedPtr->GetNumBytes(); ++i)
                    {
                        same = expectedData[i] == actualData[i];
                        if (!same)
                        {
                            break;
                        }
                    }
                    BOOST_CHECK_MESSAGE(same, tensorName + " data does not match");
                }
            }
        }
    }

private:
    std::string m_LayerName;
    std::vector<armnn::TensorInfo> m_InputTensorInfos;
    std::vector<armnn::TensorInfo> m_OutputTensorInfos;
};
166 
// Extends LayerVerifierBase for layer types that carry a descriptor:
// stores the expected descriptor so subclasses (including the classes
// produced by DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR) can compare it
// against the descriptor reported on the deserialized layer.
template<typename Descriptor>
class LayerVerifierBaseWithDescriptor : public LayerVerifierBase
{
public:
    LayerVerifierBaseWithDescriptor(const std::string& layerName,
                                    const std::vector<armnn::TensorInfo>& inputInfos,
                                    const std::vector<armnn::TensorInfo>& outputInfos,
                                    const Descriptor& descriptor)
        : LayerVerifierBase(layerName, inputInfos, outputInfos)
        , m_Descriptor(descriptor) {}

protected:
    // Asserts the visited layer's descriptor equals the expected one.
    // Relies on the Descriptor type providing operator==.
    void VerifyDescriptor(const Descriptor& descriptor)
    {
        BOOST_CHECK(descriptor == m_Descriptor);
    }

    Descriptor m_Descriptor;
};
186 
// Element-wise comparison of two raw tensor buffers reinterpreted as T,
// where T is a pointer-to-const type (e.g. const float*). Both pointers are
// first checked to be non-null; every mismatching element is reported via
// BOOST_TEST.
template<typename T>
void CompareConstTensorData(const void* data1, const void* data2, unsigned int numElements)
{
    T lhs = static_cast<T>(data1);
    T rhs = static_cast<T>(data2);
    BOOST_CHECK(lhs);
    BOOST_CHECK(rhs);

    for (unsigned int index = 0; index < numElements; ++index)
    {
        BOOST_TEST(lhs[index] == rhs[index]);
    }
}
200 
201 void CompareConstTensor(const armnn::ConstTensor& tensor1, const armnn::ConstTensor& tensor2)
202 {
203  BOOST_TEST(tensor1.GetShape() == tensor2.GetShape());
204  BOOST_TEST(GetDataTypeName(tensor1.GetDataType()) == GetDataTypeName(tensor2.GetDataType()));
205 
206  switch (tensor1.GetDataType())
207  {
209  CompareConstTensorData<const float*>(
210  tensor1.GetMemoryArea(), tensor2.GetMemoryArea(), tensor1.GetNumElements());
211  break;
214  CompareConstTensorData<const uint8_t*>(
215  tensor1.GetMemoryArea(), tensor2.GetMemoryArea(), tensor1.GetNumElements());
216  break;
218  CompareConstTensorData<const int8_t*>(
219  tensor1.GetMemoryArea(), tensor2.GetMemoryArea(), tensor1.GetNumElements());
220  break;
222  CompareConstTensorData<const int32_t*>(
223  tensor1.GetMemoryArea(), tensor2.GetMemoryArea(), tensor1.GetNumElements());
224  break;
225  default:
226  // Note that Float16 is not yet implemented
227  BOOST_TEST_MESSAGE("Unexpected datatype");
228  BOOST_TEST(false);
229  }
230 }
231 
232 armnn::INetworkPtr DeserializeNetwork(const std::string& serializerString)
233 {
234  std::vector<std::uint8_t> const serializerVector{serializerString.begin(), serializerString.end()};
235  return IDeserializer::Create()->CreateNetworkFromBinary(serializerVector);
236 }
237 
238 std::string SerializeNetwork(const armnn::INetwork& network)
239 {
241  serializer.Serialize(network);
242 
243  std::stringstream stream;
244  serializer.SaveSerializedToStream(stream);
245 
246  std::string serializerString{stream.str()};
247  return serializerString;
248 }
249 
// Fills a vector of the requested size with pseudo-random values spanning
// the full numeric range of DataType. Distribution and engine are
// function-local statics, so each type instantiation yields a fixed,
// deterministic sequence across a test run.
template<typename DataType>
static std::vector<DataType> GenerateRandomData(size_t size)
{
    constexpr bool isIntegerType = std::is_integral<DataType>::value;

    // std::uniform_int_distribution is undefined behaviour for
    // character-sized IntTypes (int8_t/uint8_t are not in the allowed set),
    // and this file instantiates GenerateRandomData<int8_t>. Draw from a
    // plain int distribution instead and narrow the result; the limits
    // below guarantee the drawn value fits in DataType.
    using DistributionValueType = typename std::conditional<
        isIntegerType && (sizeof(DataType) < sizeof(short)), int, DataType>::type;

    using Distribution =
        typename std::conditional<isIntegerType,
                                  std::uniform_int_distribution<DistributionValueType>,
                                  std::uniform_real_distribution<DistributionValueType>>::type;

    static constexpr DataType lowerLimit = std::numeric_limits<DataType>::min();
    static constexpr DataType upperLimit = std::numeric_limits<DataType>::max();

    static Distribution distribution(lowerLimit, upperLimit);
    static std::default_random_engine generator;

    std::vector<DataType> randomData(size);
    std::generate(randomData.begin(), randomData.end(),
                  []() { return static_cast<DataType>(distribution(generator)); });

    return randomData;
}
270 
271 } // anonymous namespace
272 
273 BOOST_AUTO_TEST_SUITE(SerializerTests)
274 
275 BOOST_AUTO_TEST_CASE(SerializeAddition)
276 {
278 
279  const std::string layerName("addition");
280  const armnn::TensorInfo tensorInfo({1, 2, 3}, armnn::DataType::Float32);
281 
283  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
284  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
285  armnn::IConnectableLayer* const additionLayer = network->AddAdditionLayer(layerName.c_str());
286  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
287 
288  inputLayer0->GetOutputSlot(0).Connect(additionLayer->GetInputSlot(0));
289  inputLayer1->GetOutputSlot(0).Connect(additionLayer->GetInputSlot(1));
290  additionLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
291 
292  inputLayer0->GetOutputSlot(0).SetTensorInfo(tensorInfo);
293  inputLayer1->GetOutputSlot(0).SetTensorInfo(tensorInfo);
294  additionLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
295 
296  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
297  BOOST_CHECK(deserializedNetwork);
298 
299  AdditionLayerVerifier verifier(layerName, {tensorInfo, tensorInfo}, {tensorInfo});
300  deserializedNetwork->Accept(verifier);
301 }
302 
303 BOOST_AUTO_TEST_CASE(SerializeArgMinMax)
304 {
306 
307  const std::string layerName("argminmax");
308  const armnn::TensorInfo inputInfo({1, 2, 3}, armnn::DataType::Float32);
309  const armnn::TensorInfo outputInfo({1, 3}, armnn::DataType::Signed32);
310 
311  armnn::ArgMinMaxDescriptor descriptor;
313  descriptor.m_Axis = 1;
314 
316  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
317  armnn::IConnectableLayer* const argMinMaxLayer = network->AddArgMinMaxLayer(descriptor, layerName.c_str());
318  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
319 
320  inputLayer->GetOutputSlot(0).Connect(argMinMaxLayer->GetInputSlot(0));
321  argMinMaxLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
322 
323  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
324  argMinMaxLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
325 
326  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
327  BOOST_CHECK(deserializedNetwork);
328 
329  ArgMinMaxLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, descriptor);
330  deserializedNetwork->Accept(verifier);
331 }
332 
333 BOOST_AUTO_TEST_CASE(SerializeBatchNormalization)
334 {
335  using Descriptor = armnn::BatchNormalizationDescriptor;
336  class BatchNormalizationLayerVerifier : public LayerVerifierBaseWithDescriptor<Descriptor>
337  {
338  public:
339  BatchNormalizationLayerVerifier(const std::string& layerName,
340  const std::vector<armnn::TensorInfo>& inputInfos,
341  const std::vector<armnn::TensorInfo>& outputInfos,
342  const Descriptor& descriptor,
343  const armnn::ConstTensor& mean,
344  const armnn::ConstTensor& variance,
345  const armnn::ConstTensor& beta,
346  const armnn::ConstTensor& gamma)
347  : LayerVerifierBaseWithDescriptor<Descriptor>(layerName, inputInfos, outputInfos, descriptor)
348  , m_Mean(mean)
349  , m_Variance(variance)
350  , m_Beta(beta)
351  , m_Gamma(gamma) {}
352 
353  void VisitBatchNormalizationLayer(const armnn::IConnectableLayer* layer,
354  const Descriptor& descriptor,
355  const armnn::ConstTensor& mean,
356  const armnn::ConstTensor& variance,
357  const armnn::ConstTensor& beta,
358  const armnn::ConstTensor& gamma,
359  const char* name) override
360  {
361  VerifyNameAndConnections(layer, name);
362  VerifyDescriptor(descriptor);
363 
364  CompareConstTensor(mean, m_Mean);
365  CompareConstTensor(variance, m_Variance);
366  CompareConstTensor(beta, m_Beta);
367  CompareConstTensor(gamma, m_Gamma);
368  }
369 
370  private:
371  armnn::ConstTensor m_Mean;
372  armnn::ConstTensor m_Variance;
373  armnn::ConstTensor m_Beta;
374  armnn::ConstTensor m_Gamma;
375  };
376 
377  const std::string layerName("batchNormalization");
378  const armnn::TensorInfo inputInfo ({ 1, 3, 3, 1 }, armnn::DataType::Float32);
379  const armnn::TensorInfo outputInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32);
380 
381  const armnn::TensorInfo meanInfo({1}, armnn::DataType::Float32);
382  const armnn::TensorInfo varianceInfo({1}, armnn::DataType::Float32);
383  const armnn::TensorInfo betaInfo({1}, armnn::DataType::Float32);
384  const armnn::TensorInfo gammaInfo({1}, armnn::DataType::Float32);
385 
387  descriptor.m_Eps = 0.0010000000475f;
388  descriptor.m_DataLayout = armnn::DataLayout::NHWC;
389 
390  std::vector<float> meanData({5.0});
391  std::vector<float> varianceData({2.0});
392  std::vector<float> betaData({1.0});
393  std::vector<float> gammaData({0.0});
394 
395  armnn::ConstTensor mean(meanInfo, meanData);
396  armnn::ConstTensor variance(varianceInfo, varianceData);
397  armnn::ConstTensor beta(betaInfo, betaData);
398  armnn::ConstTensor gamma(gammaInfo, gammaData);
399 
401  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
402  armnn::IConnectableLayer* const batchNormalizationLayer =
403  network->AddBatchNormalizationLayer(descriptor, mean, variance, beta, gamma, layerName.c_str());
404  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
405 
406  inputLayer->GetOutputSlot(0).Connect(batchNormalizationLayer->GetInputSlot(0));
407  batchNormalizationLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
408 
409  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
410  batchNormalizationLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
411 
412  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
413  BOOST_CHECK(deserializedNetwork);
414 
415  BatchNormalizationLayerVerifier verifier(
416  layerName, {inputInfo}, {outputInfo}, descriptor, mean, variance, beta, gamma);
417  deserializedNetwork->Accept(verifier);
418 }
419 
420 BOOST_AUTO_TEST_CASE(SerializeBatchToSpaceNd)
421 {
423 
424  const std::string layerName("spaceToBatchNd");
425  const armnn::TensorInfo inputInfo({4, 1, 2, 2}, armnn::DataType::Float32);
426  const armnn::TensorInfo outputInfo({1, 1, 4, 4}, armnn::DataType::Float32);
427 
430  desc.m_BlockShape = {2, 2};
431  desc.m_Crops = {{0, 0}, {0, 0}};
432 
434  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
435  armnn::IConnectableLayer* const batchToSpaceNdLayer = network->AddBatchToSpaceNdLayer(desc, layerName.c_str());
436  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
437 
438  inputLayer->GetOutputSlot(0).Connect(batchToSpaceNdLayer->GetInputSlot(0));
439  batchToSpaceNdLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
440 
441  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
442  batchToSpaceNdLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
443 
444  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
445  BOOST_CHECK(deserializedNetwork);
446 
447  BatchToSpaceNdLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, desc);
448  deserializedNetwork->Accept(verifier);
449 }
450 
451 BOOST_AUTO_TEST_CASE(SerializeComparison)
452 {
454 
455  const std::string layerName("comparison");
456 
457  const armnn::TensorShape shape{2, 1, 2, 4};
458 
461 
463 
465  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
466  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
467  armnn::IConnectableLayer* const comparisonLayer = network->AddComparisonLayer(descriptor, layerName.c_str());
468  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
469 
470  inputLayer0->GetOutputSlot(0).Connect(comparisonLayer->GetInputSlot(0));
471  inputLayer1->GetOutputSlot(0).Connect(comparisonLayer->GetInputSlot(1));
472  comparisonLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
473 
474  inputLayer0->GetOutputSlot(0).SetTensorInfo(inputInfo);
475  inputLayer1->GetOutputSlot(0).SetTensorInfo(inputInfo);
476  comparisonLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
477 
478  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
479  BOOST_CHECK(deserializedNetwork);
480 
481  ComparisonLayerVerifier verifier(layerName, { inputInfo, inputInfo }, { outputInfo }, descriptor);
482  deserializedNetwork->Accept(verifier);
483 }
484 
485 BOOST_AUTO_TEST_CASE(SerializeConstant)
486 {
487  class ConstantLayerVerifier : public LayerVerifierBase
488  {
489  public:
490  ConstantLayerVerifier(const std::string& layerName,
491  const std::vector<armnn::TensorInfo>& inputInfos,
492  const std::vector<armnn::TensorInfo>& outputInfos,
493  const armnn::ConstTensor& layerInput)
494  : LayerVerifierBase(layerName, inputInfos, outputInfos)
495  , m_LayerInput(layerInput) {}
496 
497  void VisitConstantLayer(const armnn::IConnectableLayer* layer,
498  const armnn::ConstTensor& input,
499  const char* name) override
500  {
501  VerifyNameAndConnections(layer, name);
502  CompareConstTensor(input, m_LayerInput);
503  }
504 
505  void VisitAdditionLayer(const armnn::IConnectableLayer*, const char*) override {}
506 
507  private:
508  armnn::ConstTensor m_LayerInput;
509  };
510 
511  const std::string layerName("constant");
512  const armnn::TensorInfo info({ 2, 3 }, armnn::DataType::Float32);
513 
514  std::vector<float> constantData = GenerateRandomData<float>(info.GetNumElements());
515  armnn::ConstTensor constTensor(info, constantData);
516 
518  armnn::IConnectableLayer* input = network->AddInputLayer(0);
519  armnn::IConnectableLayer* constant = network->AddConstantLayer(constTensor, layerName.c_str());
520  armnn::IConnectableLayer* add = network->AddAdditionLayer();
521  armnn::IConnectableLayer* output = network->AddOutputLayer(0);
522 
523  input->GetOutputSlot(0).Connect(add->GetInputSlot(0));
524  constant->GetOutputSlot(0).Connect(add->GetInputSlot(1));
525  add->GetOutputSlot(0).Connect(output->GetInputSlot(0));
526 
527  input->GetOutputSlot(0).SetTensorInfo(info);
528  constant->GetOutputSlot(0).SetTensorInfo(info);
529  add->GetOutputSlot(0).SetTensorInfo(info);
530 
531  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
532  BOOST_CHECK(deserializedNetwork);
533 
534  ConstantLayerVerifier verifier(layerName, {}, {info}, constTensor);
535  deserializedNetwork->Accept(verifier);
536 }
537 
538 BOOST_AUTO_TEST_CASE(SerializeConvolution2d)
539 {
540  using Descriptor = armnn::Convolution2dDescriptor;
541  class Convolution2dLayerVerifier : public LayerVerifierBaseWithDescriptor<Descriptor>
542  {
543  public:
544  Convolution2dLayerVerifier(const std::string& layerName,
545  const std::vector<armnn::TensorInfo>& inputInfos,
546  const std::vector<armnn::TensorInfo>& outputInfos,
547  const Descriptor& descriptor,
548  const armnn::ConstTensor& weights,
550  : LayerVerifierBaseWithDescriptor<Descriptor>(layerName, inputInfos, outputInfos, descriptor)
551  , m_Weights(weights)
552  , m_Biases(biases) {}
553 
554  void VisitConvolution2dLayer(const armnn::IConnectableLayer* layer,
555  const Descriptor& descriptor,
556  const armnn::ConstTensor& weights,
558  const char* name) override
559  {
560  VerifyNameAndConnections(layer, name);
561  VerifyDescriptor(descriptor);
562 
563  // check weights
564  CompareConstTensor(weights, m_Weights);
565 
566  // check biases
567  BOOST_CHECK(biases.has_value() == descriptor.m_BiasEnabled);
568  BOOST_CHECK(biases.has_value() == m_Biases.has_value());
569 
570  if (biases.has_value() && m_Biases.has_value())
571  {
572  CompareConstTensor(biases.value(), m_Biases.value());
573  }
574  }
575 
576  private:
577  armnn::ConstTensor m_Weights;
579  };
580 
581  const std::string layerName("convolution2d");
582  const armnn::TensorInfo inputInfo ({ 1, 5, 5, 1 }, armnn::DataType::Float32);
583  const armnn::TensorInfo outputInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32);
584 
585  const armnn::TensorInfo weightsInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32);
586  const armnn::TensorInfo biasesInfo ({ 1 }, armnn::DataType::Float32);
587 
588  std::vector<float> weightsData = GenerateRandomData<float>(weightsInfo.GetNumElements());
589  armnn::ConstTensor weights(weightsInfo, weightsData);
590 
591  std::vector<float> biasesData = GenerateRandomData<float>(biasesInfo.GetNumElements());
592  armnn::ConstTensor biases(biasesInfo, biasesData);
593 
595  descriptor.m_PadLeft = 1;
596  descriptor.m_PadRight = 1;
597  descriptor.m_PadTop = 1;
598  descriptor.m_PadBottom = 1;
599  descriptor.m_StrideX = 2;
600  descriptor.m_StrideY = 2;
601  descriptor.m_DilationX = 2;
602  descriptor.m_DilationY = 2;
603  descriptor.m_BiasEnabled = true;
605 
607  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
608  armnn::IConnectableLayer* const convLayer =
609  network->AddConvolution2dLayer(descriptor,
610  weights,
612  layerName.c_str());
613  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
614 
615  inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
616  convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
617 
618  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
619  convLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
620 
621  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
622  BOOST_CHECK(deserializedNetwork);
623 
624  Convolution2dLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, descriptor, weights, biases);
625  deserializedNetwork->Accept(verifier);
626 }
627 
628 BOOST_AUTO_TEST_CASE(SerializeConvolution2dWithPerAxisParams)
629 {
630  using Descriptor = armnn::Convolution2dDescriptor;
631  class Convolution2dLayerVerifier : public LayerVerifierBaseWithDescriptor<Descriptor>
632  {
633  public:
634  Convolution2dLayerVerifier(const std::string& layerName,
635  const std::vector<armnn::TensorInfo>& inputInfos,
636  const std::vector<armnn::TensorInfo>& outputInfos,
637  const Descriptor& descriptor,
638  const armnn::ConstTensor& weights,
640  : LayerVerifierBaseWithDescriptor<Descriptor>(layerName, inputInfos, outputInfos, descriptor)
641  , m_Weights(weights)
642  , m_Biases(biases) {}
643 
644  void VisitConvolution2dLayer(const armnn::IConnectableLayer* layer,
645  const Descriptor& descriptor,
646  const armnn::ConstTensor& weights,
648  const char* name) override
649  {
650  VerifyNameAndConnections(layer, name);
651  VerifyDescriptor(descriptor);
652 
653  // check weights
654  CompareConstTensor(weights, m_Weights);
655 
656  // check biases
657  BOOST_CHECK(biases.has_value() == descriptor.m_BiasEnabled);
658  BOOST_CHECK(biases.has_value() == m_Biases.has_value());
659 
660  if (biases.has_value() && m_Biases.has_value())
661  {
662  CompareConstTensor(biases.value(), m_Biases.value());
663  }
664  }
665 
666  private:
667  armnn::ConstTensor m_Weights;
669  };
670 
671  using namespace armnn;
672 
673  const std::string layerName("convolution2dWithPerAxis");
674  const TensorInfo inputInfo ({ 1, 3, 1, 2 }, DataType::QAsymmU8, 0.55f, 128);
675  const TensorInfo outputInfo({ 1, 3, 1, 3 }, DataType::QAsymmU8, 0.75f, 128);
676 
677  const std::vector<float> quantScales{ 0.75f, 0.65f, 0.85f };
678  constexpr unsigned int quantDimension = 0;
679 
680  const TensorInfo kernelInfo({ 3, 1, 1, 2 }, DataType::QSymmS8, quantScales, quantDimension);
681 
682  const std::vector<float> biasQuantScales{ 0.25f, 0.50f, 0.75f };
683  const TensorInfo biasInfo({ 3 }, DataType::Signed32, biasQuantScales, quantDimension);
684 
685  std::vector<int8_t> kernelData = GenerateRandomData<int8_t>(kernelInfo.GetNumElements());
686  armnn::ConstTensor weights(kernelInfo, kernelData);
687  std::vector<int32_t> biasData = GenerateRandomData<int32_t>(biasInfo.GetNumElements());
688  armnn::ConstTensor biases(biasInfo, biasData);
689 
690  Convolution2dDescriptor descriptor;
691  descriptor.m_StrideX = 1;
692  descriptor.m_StrideY = 1;
693  descriptor.m_PadLeft = 0;
694  descriptor.m_PadRight = 0;
695  descriptor.m_PadTop = 0;
696  descriptor.m_PadBottom = 0;
697  descriptor.m_BiasEnabled = true;
699 
701  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
702  armnn::IConnectableLayer* const convLayer =
703  network->AddConvolution2dLayer(descriptor,
704  weights,
706  layerName.c_str());
707  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
708 
709  inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
710  convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
711 
712  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
713  convLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
714 
715  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
716  BOOST_CHECK(deserializedNetwork);
717 
718  Convolution2dLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, descriptor, weights, biases);
719  deserializedNetwork->Accept(verifier);
720 }
721 
722 BOOST_AUTO_TEST_CASE(SerializeDepthToSpace)
723 {
725 
726  const std::string layerName("depthToSpace");
727 
728  const armnn::TensorInfo inputInfo ({ 1, 8, 4, 12 }, armnn::DataType::Float32);
729  const armnn::TensorInfo outputInfo({ 1, 16, 8, 3 }, armnn::DataType::Float32);
730 
732  desc.m_BlockSize = 2;
733  desc.m_DataLayout = armnn::DataLayout::NHWC;
734 
736  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
737  armnn::IConnectableLayer* const depthToSpaceLayer = network->AddDepthToSpaceLayer(desc, layerName.c_str());
738  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
739 
740  inputLayer->GetOutputSlot(0).Connect(depthToSpaceLayer->GetInputSlot(0));
741  depthToSpaceLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
742 
743  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
744  depthToSpaceLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
745 
746  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
747  BOOST_CHECK(deserializedNetwork);
748 
749  DepthToSpaceLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, desc);
750  deserializedNetwork->Accept(verifier);
751 }
752 
753 BOOST_AUTO_TEST_CASE(SerializeDepthwiseConvolution2d)
754 {
755  using Descriptor = armnn::DepthwiseConvolution2dDescriptor;
756  class DepthwiseConvolution2dLayerVerifier : public LayerVerifierBaseWithDescriptor<Descriptor>
757  {
758  public:
759  DepthwiseConvolution2dLayerVerifier(const std::string& layerName,
760  const std::vector<armnn::TensorInfo>& inputInfos,
761  const std::vector<armnn::TensorInfo>& outputInfos,
762  const Descriptor& descriptor,
763  const armnn::ConstTensor& weights,
764  const armnn::Optional<armnn::ConstTensor>& biases) :
765  LayerVerifierBaseWithDescriptor<Descriptor>(layerName, inputInfos, outputInfos, descriptor),
766  m_Weights(weights),
767  m_Biases(biases) {}
768 
769  void VisitDepthwiseConvolution2dLayer(const armnn::IConnectableLayer* layer,
770  const Descriptor& descriptor,
771  const armnn::ConstTensor& weights,
773  const char* name) override
774  {
775  VerifyNameAndConnections(layer, name);
776  VerifyDescriptor(descriptor);
777 
778  // check weights
779  CompareConstTensor(weights, m_Weights);
780 
781  // check biases
782  BOOST_CHECK(biases.has_value() == descriptor.m_BiasEnabled);
783  BOOST_CHECK(biases.has_value() == m_Biases.has_value());
784 
785  if (biases.has_value() && m_Biases.has_value())
786  {
787  CompareConstTensor(biases.value(), m_Biases.value());
788  }
789  }
790 
791  private:
792  armnn::ConstTensor m_Weights;
794  };
795 
796  const std::string layerName("depwiseConvolution2d");
797  const armnn::TensorInfo inputInfo ({ 1, 5, 5, 3 }, armnn::DataType::Float32);
798  const armnn::TensorInfo outputInfo({ 1, 3, 3, 3 }, armnn::DataType::Float32);
799 
800  const armnn::TensorInfo weightsInfo({ 1, 3, 3, 3 }, armnn::DataType::Float32);
801  const armnn::TensorInfo biasesInfo ({ 3 }, armnn::DataType::Float32);
802 
803  std::vector<float> weightsData = GenerateRandomData<float>(weightsInfo.GetNumElements());
804  armnn::ConstTensor weights(weightsInfo, weightsData);
805 
806  std::vector<int32_t> biasesData = GenerateRandomData<int32_t>(biasesInfo.GetNumElements());
807  armnn::ConstTensor biases(biasesInfo, biasesData);
808 
810  descriptor.m_PadLeft = 1;
811  descriptor.m_PadRight = 1;
812  descriptor.m_PadTop = 1;
813  descriptor.m_PadBottom = 1;
814  descriptor.m_StrideX = 2;
815  descriptor.m_StrideY = 2;
816  descriptor.m_DilationX = 2;
817  descriptor.m_DilationY = 2;
818  descriptor.m_BiasEnabled = true;
820 
822  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
823  armnn::IConnectableLayer* const depthwiseConvLayer =
824  network->AddDepthwiseConvolution2dLayer(descriptor,
825  weights,
827  layerName.c_str());
828  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
829 
830  inputLayer->GetOutputSlot(0).Connect(depthwiseConvLayer->GetInputSlot(0));
831  depthwiseConvLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
832 
833  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
834  depthwiseConvLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
835 
836  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
837  BOOST_CHECK(deserializedNetwork);
838 
839  DepthwiseConvolution2dLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, descriptor, weights, biases);
840  deserializedNetwork->Accept(verifier);
841 }
842 
843 BOOST_AUTO_TEST_CASE(SerializeDepthwiseConvolution2dWithPerAxisParams)
844 {
845  using Descriptor = armnn::DepthwiseConvolution2dDescriptor;
846  class DepthwiseConvolution2dLayerVerifier : public LayerVerifierBaseWithDescriptor<Descriptor>
847  {
848  public:
849  DepthwiseConvolution2dLayerVerifier(const std::string& layerName,
850  const std::vector<armnn::TensorInfo>& inputInfos,
851  const std::vector<armnn::TensorInfo>& outputInfos,
852  const Descriptor& descriptor,
853  const armnn::ConstTensor& weights,
854  const armnn::Optional<armnn::ConstTensor>& biases) :
855  LayerVerifierBaseWithDescriptor<Descriptor>(layerName, inputInfos, outputInfos, descriptor),
856  m_Weights(weights),
857  m_Biases(biases) {}
858 
859  void VisitDepthwiseConvolution2dLayer(const armnn::IConnectableLayer* layer,
860  const Descriptor& descriptor,
861  const armnn::ConstTensor& weights,
863  const char* name) override
864  {
865  VerifyNameAndConnections(layer, name);
866  VerifyDescriptor(descriptor);
867 
868  // check weights
869  CompareConstTensor(weights, m_Weights);
870 
871  // check biases
872  BOOST_CHECK(biases.has_value() == descriptor.m_BiasEnabled);
873  BOOST_CHECK(biases.has_value() == m_Biases.has_value());
874 
875  if (biases.has_value() && m_Biases.has_value())
876  {
877  CompareConstTensor(biases.value(), m_Biases.value());
878  }
879  }
880 
881  private:
882  armnn::ConstTensor m_Weights;
884  };
885 
886  using namespace armnn;
887 
888  const std::string layerName("depwiseConvolution2dWithPerAxis");
889  const TensorInfo inputInfo ({ 1, 3, 3, 2 }, DataType::QAsymmU8, 0.55f, 128);
890  const TensorInfo outputInfo({ 1, 2, 2, 4 }, DataType::QAsymmU8, 0.75f, 128);
891 
892  const std::vector<float> quantScales{ 0.75f, 0.80f, 0.90f, 0.95f };
893  const unsigned int quantDimension = 0;
894  TensorInfo kernelInfo({ 2, 2, 2, 2 }, DataType::QSymmS8, quantScales, quantDimension);
895 
896  const std::vector<float> biasQuantScales{ 0.25f, 0.35f, 0.45f, 0.55f };
897  constexpr unsigned int biasQuantDimension = 0;
898  TensorInfo biasInfo({ 4 }, DataType::Signed32, biasQuantScales, biasQuantDimension);
899 
900  std::vector<int8_t> kernelData = GenerateRandomData<int8_t>(kernelInfo.GetNumElements());
901  armnn::ConstTensor weights(kernelInfo, kernelData);
902  std::vector<int32_t> biasData = GenerateRandomData<int32_t>(biasInfo.GetNumElements());
903  armnn::ConstTensor biases(biasInfo, biasData);
904 
906  descriptor.m_StrideX = 1;
907  descriptor.m_StrideY = 1;
908  descriptor.m_PadLeft = 0;
909  descriptor.m_PadRight = 0;
910  descriptor.m_PadTop = 0;
911  descriptor.m_PadBottom = 0;
912  descriptor.m_DilationX = 1;
913  descriptor.m_DilationY = 1;
914  descriptor.m_BiasEnabled = true;
916 
918  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
919  armnn::IConnectableLayer* const depthwiseConvLayer =
920  network->AddDepthwiseConvolution2dLayer(descriptor,
921  weights,
923  layerName.c_str());
924  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
925 
926  inputLayer->GetOutputSlot(0).Connect(depthwiseConvLayer->GetInputSlot(0));
927  depthwiseConvLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
928 
929  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
930  depthwiseConvLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
931 
932  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
933  BOOST_CHECK(deserializedNetwork);
934 
935  DepthwiseConvolution2dLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, descriptor, weights, biases);
936  deserializedNetwork->Accept(verifier);
937 }
938 
939 BOOST_AUTO_TEST_CASE(SerializeDequantize)
940 {
942 
943  const std::string layerName("dequantize");
944  const armnn::TensorInfo inputInfo({ 1, 5, 2, 3 }, armnn::DataType::QAsymmU8, 0.5f, 1);
945  const armnn::TensorInfo outputInfo({ 1, 5, 2, 3 }, armnn::DataType::Float32);
946 
948  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
949  armnn::IConnectableLayer* const dequantizeLayer = network->AddDequantizeLayer(layerName.c_str());
950  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
951 
952  inputLayer->GetOutputSlot(0).Connect(dequantizeLayer->GetInputSlot(0));
953  dequantizeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
954 
955  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
956  dequantizeLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
957 
958  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
959  BOOST_CHECK(deserializedNetwork);
960 
961  DequantizeLayerVerifier verifier(layerName, {inputInfo}, {outputInfo});
962  deserializedNetwork->Accept(verifier);
963 }
964 
965 BOOST_AUTO_TEST_CASE(SerializeDeserializeDetectionPostProcess)
966 {
967  using Descriptor = armnn::DetectionPostProcessDescriptor;
968  class DetectionPostProcessLayerVerifier : public LayerVerifierBaseWithDescriptor<Descriptor>
969  {
970  public:
971  DetectionPostProcessLayerVerifier(const std::string& layerName,
972  const std::vector<armnn::TensorInfo>& inputInfos,
973  const std::vector<armnn::TensorInfo>& outputInfos,
974  const Descriptor& descriptor,
976  : LayerVerifierBaseWithDescriptor<Descriptor>(layerName, inputInfos, outputInfos, descriptor)
977  , m_Anchors(anchors) {}
978 
979  void VisitDetectionPostProcessLayer(const armnn::IConnectableLayer* layer,
980  const Descriptor& descriptor,
981  const armnn::ConstTensor& anchors,
982  const char* name) override
983  {
984  VerifyNameAndConnections(layer, name);
985  VerifyDescriptor(descriptor);
986 
987  CompareConstTensor(anchors, m_Anchors);
988  }
989 
990  private:
991  armnn::ConstTensor m_Anchors;
992  };
993 
994  const std::string layerName("detectionPostProcess");
995 
996  const std::vector<armnn::TensorInfo> inputInfos({
999  });
1000 
1001  const std::vector<armnn::TensorInfo> outputInfos({
1006  });
1007 
1009  descriptor.m_UseRegularNms = true;
1010  descriptor.m_MaxDetections = 3;
1011  descriptor.m_MaxClassesPerDetection = 1;
1012  descriptor.m_DetectionsPerClass =1;
1013  descriptor.m_NmsScoreThreshold = 0.0;
1014  descriptor.m_NmsIouThreshold = 0.5;
1015  descriptor.m_NumClasses = 2;
1016  descriptor.m_ScaleY = 10.0;
1017  descriptor.m_ScaleX = 10.0;
1018  descriptor.m_ScaleH = 5.0;
1019  descriptor.m_ScaleW = 5.0;
1020 
1022  const std::vector<float> anchorsData({
1023  0.5f, 0.5f, 1.0f, 1.0f,
1024  0.5f, 0.5f, 1.0f, 1.0f,
1025  0.5f, 0.5f, 1.0f, 1.0f,
1026  0.5f, 10.5f, 1.0f, 1.0f,
1027  0.5f, 10.5f, 1.0f, 1.0f,
1028  0.5f, 100.5f, 1.0f, 1.0f
1029  });
1030  armnn::ConstTensor anchors(anchorsInfo, anchorsData);
1031 
1033  armnn::IConnectableLayer* const detectionLayer =
1034  network->AddDetectionPostProcessLayer(descriptor, anchors, layerName.c_str());
1035 
1036  for (unsigned int i = 0; i < 2; i++)
1037  {
1038  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(static_cast<int>(i));
1039  inputLayer->GetOutputSlot(0).Connect(detectionLayer->GetInputSlot(i));
1040  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfos[i]);
1041  }
1042 
1043  for (unsigned int i = 0; i < 4; i++)
1044  {
1045  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(static_cast<int>(i));
1046  detectionLayer->GetOutputSlot(i).Connect(outputLayer->GetInputSlot(0));
1047  detectionLayer->GetOutputSlot(i).SetTensorInfo(outputInfos[i]);
1048  }
1049 
1050  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1051  BOOST_CHECK(deserializedNetwork);
1052 
1053  DetectionPostProcessLayerVerifier verifier(layerName, inputInfos, outputInfos, descriptor, anchors);
1054  deserializedNetwork->Accept(verifier);
1055 }
1056 
1057 BOOST_AUTO_TEST_CASE(SerializeDivision)
1058 {
1060 
1061  const std::string layerName("division");
1062  const armnn::TensorInfo info({ 1, 5, 2, 3 }, armnn::DataType::Float32);
1063 
1065  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
1066  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
1067  armnn::IConnectableLayer* const divisionLayer = network->AddDivisionLayer(layerName.c_str());
1068  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1069 
1070  inputLayer0->GetOutputSlot(0).Connect(divisionLayer->GetInputSlot(0));
1071  inputLayer1->GetOutputSlot(0).Connect(divisionLayer->GetInputSlot(1));
1072  divisionLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1073 
1074  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
1075  inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
1076  divisionLayer->GetOutputSlot(0).SetTensorInfo(info);
1077 
1078  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1079  BOOST_CHECK(deserializedNetwork);
1080 
1081  DivisionLayerVerifier verifier(layerName, {info, info}, {info});
1082  deserializedNetwork->Accept(verifier);
1083 }
1084 
// Verifier for networks built via the deprecated AddEqualLayer: the serializer is
// expected to emit a ComparisonLayer (with the Equal operation) instead, so
// encountering an actual EqualLayer after deserialization is a test failure.
class EqualLayerVerifier : public LayerVerifierBase
{
public:
    EqualLayerVerifier(const std::string& layerName,
                       const std::vector<armnn::TensorInfo>& inputInfos,
                       const std::vector<armnn::TensorInfo>& outputInfos)
        : LayerVerifierBase(layerName, inputInfos, outputInfos) {}

    void VisitComparisonLayer(const armnn::IConnectableLayer* layer,
                              const armnn::ComparisonDescriptor& descriptor,
                              const char* name) override
    {
        VerifyNameAndConnections(layer, name);
        // The deprecated EqualLayer must have been converted to a ComparisonLayer
        // carrying the Equal operation.
        BOOST_CHECK(descriptor.m_Operation == armnn::ComparisonOperation::Equal);
    }

    // Reaching this visitor means the conversion did not happen - fail loudly.
    void VisitEqualLayer(const armnn::IConnectableLayer*, const char*) override
    {
        throw armnn::Exception("EqualLayer should have translated to ComparisonLayer");
    }
};
1106 
1107 // NOTE: Until the deprecated AddEqualLayer disappears this test checks that calling
1108 // AddEqualLayer places a ComparisonLayer into the serialized format and that
1109 // when this deserialises we have a ComparisonLayer
1110 BOOST_AUTO_TEST_CASE(SerializeEqual)
1111 {
1112  const std::string layerName("equal");
1113 
1114  const armnn::TensorShape shape{2, 1, 2, 4};
1115 
1118 
1120  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
1121  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
1123  armnn::IConnectableLayer* const equalLayer = network->AddEqualLayer(layerName.c_str());
1125  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1126 
1127  inputLayer0->GetOutputSlot(0).Connect(equalLayer->GetInputSlot(0));
1128  inputLayer1->GetOutputSlot(0).Connect(equalLayer->GetInputSlot(1));
1129  equalLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1130 
1131  inputLayer0->GetOutputSlot(0).SetTensorInfo(inputInfo);
1132  inputLayer1->GetOutputSlot(0).SetTensorInfo(inputInfo);
1133  equalLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1134 
1135  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1136  BOOST_CHECK(deserializedNetwork);
1137 
1138  EqualLayerVerifier verifier(layerName, { inputInfo, inputInfo }, { outputInfo });
1139  deserializedNetwork->Accept(verifier);
1140 }
1141 
1142 BOOST_AUTO_TEST_CASE(EnsureEqualBackwardCompatibility)
1143 {
1144  // The hex data below is a flat buffer containing a simple network with two inputs,
1145  // an EqualLayer (now deprecated) and an output
1146  //
1147  // This test verifies that we can still deserialize this old-style model by replacing
1148  // the EqualLayer with an equivalent ComparisonLayer
1149  const std::vector<uint8_t> equalModel =
1150  {
1151  0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00,
1152  0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
1153  0xCC, 0x01, 0x00, 0x00, 0x20, 0x01, 0x00, 0x00, 0x70, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x02, 0x00,
1154  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
1155  0x60, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B, 0x04, 0x00, 0x00, 0x00, 0xFE, 0xFE, 0xFF, 0xFF, 0x04, 0x00,
1156  0x00, 0x00, 0x06, 0xFF, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0xEA, 0xFE, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00,
1157  0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00,
1158  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1159  0x64, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xB4, 0xFE, 0xFF, 0xFF, 0x00, 0x00,
1160  0x00, 0x13, 0x04, 0x00, 0x00, 0x00, 0x52, 0xFF, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x36, 0xFF, 0xFF, 0xFF,
1161  0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x11, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x1C, 0x00,
1162  0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x65, 0x71, 0x75, 0x61, 0x6C, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
1163  0x5C, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x34, 0xFF,
1164  0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x92, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x04, 0x08, 0x00, 0x00, 0x00,
1165  0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00,
1166  0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x08, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00,
1167  0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x00, 0x00,
1168  0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0E, 0x00,
1169  0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00,
1170  0x06, 0x00, 0x08, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0E, 0x00,
1171  0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
1172  0x0E, 0x00, 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x14, 0x00, 0x0E, 0x00, 0x00, 0x00,
1173  0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00,
1174  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
1175  0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00,
1176  0x00, 0x00, 0x66, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1177  0x04, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x04, 0x00,
1178  0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09,
1179  0x04, 0x00, 0x00, 0x00, 0xF6, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x0A, 0x00,
1180  0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x14, 0x00, 0x00, 0x00,
1181  0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00,
1182  0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1183  0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0A, 0x00, 0x00, 0x00,
1184  0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x08, 0x00,
1185  0x07, 0x00, 0x0C, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
1186  0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
1187  0x04, 0x00, 0x00, 0x00
1188  };
1189 
1190  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(std::string(equalModel.begin(), equalModel.end()));
1191  BOOST_CHECK(deserializedNetwork);
1192 
1193  const armnn::TensorShape shape{ 2, 1, 2, 4 };
1194 
1197 
1198  EqualLayerVerifier verifier("equal", { inputInfo, inputInfo }, { outputInfo });
1199  deserializedNetwork->Accept(verifier);
1200 }
1201 
1202 BOOST_AUTO_TEST_CASE(SerializeFill)
1203 {
1205 
1206  const std::string layerName("fill");
1207  const armnn::TensorInfo inputInfo({4}, armnn::DataType::Signed32);
1208  const armnn::TensorInfo outputInfo({1, 3, 3, 1}, armnn::DataType::Float32);
1209 
1210  armnn::FillDescriptor descriptor(1.0f);
1211 
1213  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1214  armnn::IConnectableLayer* const fillLayer = network->AddFillLayer(descriptor, layerName.c_str());
1215  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1216 
1217  inputLayer->GetOutputSlot(0).Connect(fillLayer->GetInputSlot(0));
1218  fillLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1219 
1220  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
1221  fillLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1222 
1223  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1224  BOOST_CHECK(deserializedNetwork);
1225 
1226  FillLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, descriptor);
1227 
1228  deserializedNetwork->Accept(verifier);
1229 }
1230 
1231 BOOST_AUTO_TEST_CASE(SerializeFloor)
1232 {
1234 
1235  const std::string layerName("floor");
1237 
1239  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1240  armnn::IConnectableLayer* const floorLayer = network->AddFloorLayer(layerName.c_str());
1241  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1242 
1243  inputLayer->GetOutputSlot(0).Connect(floorLayer->GetInputSlot(0));
1244  floorLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1245 
1246  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
1247  floorLayer->GetOutputSlot(0).SetTensorInfo(info);
1248 
1249  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1250  BOOST_CHECK(deserializedNetwork);
1251 
1252  FloorLayerVerifier verifier(layerName, {info}, {info});
1253  deserializedNetwork->Accept(verifier);
1254 }
1255 
1256 BOOST_AUTO_TEST_CASE(SerializeFullyConnected)
1257 {
1258  using Descriptor = armnn::FullyConnectedDescriptor;
1259  class FullyConnectedLayerVerifier : public LayerVerifierBaseWithDescriptor<Descriptor>
1260  {
1261  public:
1262  FullyConnectedLayerVerifier(const std::string& layerName,
1263  const std::vector<armnn::TensorInfo>& inputInfos,
1264  const std::vector<armnn::TensorInfo>& outputInfos,
1265  const Descriptor& descriptor,
1266  const armnn::ConstTensor& weight,
1268  : LayerVerifierBaseWithDescriptor<Descriptor>(layerName, inputInfos, outputInfos, descriptor)
1269  , m_Weight(weight)
1270  , m_Bias(bias) {}
1271 
1272  void VisitFullyConnectedLayer(const armnn::IConnectableLayer* layer,
1273  const Descriptor& descriptor,
1274  const armnn::ConstTensor& weight,
1276  const char* name) override
1277  {
1278  VerifyNameAndConnections(layer, name);
1279  VerifyDescriptor(descriptor);
1280 
1281  CompareConstTensor(weight, m_Weight);
1282 
1283  BOOST_TEST(bias.has_value() == descriptor.m_BiasEnabled);
1284  BOOST_TEST(bias.has_value() == m_Bias.has_value());
1285 
1286  if (bias.has_value() && m_Bias.has_value())
1287  {
1288  CompareConstTensor(bias.value(), m_Bias.value());
1289  }
1290  }
1291 
1292  private:
1293  armnn::ConstTensor m_Weight;
1295  };
1296 
1297  const std::string layerName("fullyConnected");
1298  const armnn::TensorInfo inputInfo ({ 2, 5, 1, 1 }, armnn::DataType::Float32);
1299  const armnn::TensorInfo outputInfo({ 2, 3 }, armnn::DataType::Float32);
1300 
1301  const armnn::TensorInfo weightsInfo({ 5, 3 }, armnn::DataType::Float32);
1302  const armnn::TensorInfo biasesInfo ({ 3 }, armnn::DataType::Float32);
1303  std::vector<float> weightsData = GenerateRandomData<float>(weightsInfo.GetNumElements());
1304  std::vector<float> biasesData = GenerateRandomData<float>(biasesInfo.GetNumElements());
1305  armnn::ConstTensor weights(weightsInfo, weightsData);
1306  armnn::ConstTensor biases(biasesInfo, biasesData);
1307 
1309  descriptor.m_BiasEnabled = true;
1310  descriptor.m_TransposeWeightMatrix = false;
1311 
1313  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1314  armnn::IConnectableLayer* const fullyConnectedLayer =
1315  network->AddFullyConnectedLayer(descriptor,
1316  weights,
1318  layerName.c_str());
1319  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1320 
1321  inputLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(0));
1322  fullyConnectedLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1323 
1324  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
1325  fullyConnectedLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1326 
1327  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1328  BOOST_CHECK(deserializedNetwork);
1329 
1330  FullyConnectedLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, descriptor, weights, biases);
1331  deserializedNetwork->Accept(verifier);
1332 }
1333 
1334 BOOST_AUTO_TEST_CASE(SerializeGather)
1335 {
1337  class GatherLayerVerifier : public LayerVerifierBaseWithDescriptor<GatherDescriptor>
1338  {
1339  public:
1340  GatherLayerVerifier(const std::string& layerName,
1341  const std::vector<armnn::TensorInfo>& inputInfos,
1342  const std::vector<armnn::TensorInfo>& outputInfos,
1343  const GatherDescriptor& descriptor)
1344  : LayerVerifierBaseWithDescriptor<GatherDescriptor>(layerName, inputInfos, outputInfos, descriptor) {}
1345 
1346  void VisitGatherLayer(const armnn::IConnectableLayer* layer,
1347  const GatherDescriptor& descriptor,
1348  const char *name) override
1349  {
1350  VerifyNameAndConnections(layer, name);
1351  BOOST_CHECK(descriptor.m_Axis == m_Descriptor.m_Axis);
1352  }
1353 
1354  void VisitConstantLayer(const armnn::IConnectableLayer*,
1355  const armnn::ConstTensor&,
1356  const char*) override {}
1357  };
1358 
1359  const std::string layerName("gather");
1360  armnn::TensorInfo paramsInfo({ 8 }, armnn::DataType::QAsymmU8);
1361  armnn::TensorInfo outputInfo({ 3 }, armnn::DataType::QAsymmU8);
1362  const armnn::TensorInfo indicesInfo({ 3 }, armnn::DataType::Signed32);
1363  GatherDescriptor descriptor;
1364  descriptor.m_Axis = 1;
1365 
1366  paramsInfo.SetQuantizationScale(1.0f);
1367  paramsInfo.SetQuantizationOffset(0);
1368  outputInfo.SetQuantizationScale(1.0f);
1369  outputInfo.SetQuantizationOffset(0);
1370 
1371  const std::vector<int32_t>& indicesData = {7, 6, 5};
1372 
1374  armnn::IConnectableLayer *const inputLayer = network->AddInputLayer(0);
1375  armnn::IConnectableLayer *const constantLayer =
1376  network->AddConstantLayer(armnn::ConstTensor(indicesInfo, indicesData));
1377  armnn::IConnectableLayer *const gatherLayer = network->AddGatherLayer(descriptor, layerName.c_str());
1378  armnn::IConnectableLayer *const outputLayer = network->AddOutputLayer(0);
1379 
1380  inputLayer->GetOutputSlot(0).Connect(gatherLayer->GetInputSlot(0));
1381  constantLayer->GetOutputSlot(0).Connect(gatherLayer->GetInputSlot(1));
1382  gatherLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1383 
1384  inputLayer->GetOutputSlot(0).SetTensorInfo(paramsInfo);
1385  constantLayer->GetOutputSlot(0).SetTensorInfo(indicesInfo);
1386  gatherLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1387 
1388  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1389  BOOST_CHECK(deserializedNetwork);
1390 
1391  GatherLayerVerifier verifier(layerName, {paramsInfo, indicesInfo}, {outputInfo}, descriptor);
1392  deserializedNetwork->Accept(verifier);
1393 }
1394 
// Verifier for networks built via the deprecated AddGreaterLayer: the serializer is
// expected to emit a ComparisonLayer (with the Greater operation) instead, so
// encountering an actual GreaterLayer after deserialization is a test failure.
class GreaterLayerVerifier : public LayerVerifierBase
{
public:
    GreaterLayerVerifier(const std::string& layerName,
                         const std::vector<armnn::TensorInfo>& inputInfos,
                         const std::vector<armnn::TensorInfo>& outputInfos)
        : LayerVerifierBase(layerName, inputInfos, outputInfos) {}

    void VisitComparisonLayer(const armnn::IConnectableLayer* layer,
                              const armnn::ComparisonDescriptor& descriptor,
                              const char* name) override
    {
        VerifyNameAndConnections(layer, name);
        // The deprecated GreaterLayer must have been converted to a ComparisonLayer
        // carrying the Greater operation.
        BOOST_CHECK(descriptor.m_Operation == armnn::ComparisonOperation::Greater);
    }

    // Reaching this visitor means the conversion did not happen - fail loudly.
    void VisitGreaterLayer(const armnn::IConnectableLayer*, const char*) override
    {
        throw armnn::Exception("GreaterLayer should have translated to ComparisonLayer");
    }
};
1416 
1417 // NOTE: Until the deprecated AddGreaterLayer disappears this test checks that calling
1418 // AddGreaterLayer places a ComparisonLayer into the serialized format and that
1419 // when this deserialises we have a ComparisonLayer
1420 BOOST_AUTO_TEST_CASE(SerializeGreater)
1421 {
1422  const std::string layerName("greater");
1423 
1424  const armnn::TensorShape shape{2, 1, 2, 4};
1425 
1428 
1430  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
1431  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
1433  armnn::IConnectableLayer* const equalLayer = network->AddGreaterLayer(layerName.c_str());
1435  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1436 
1437  inputLayer0->GetOutputSlot(0).Connect(equalLayer->GetInputSlot(0));
1438  inputLayer1->GetOutputSlot(0).Connect(equalLayer->GetInputSlot(1));
1439  equalLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1440 
1441  inputLayer0->GetOutputSlot(0).SetTensorInfo(inputInfo);
1442  inputLayer1->GetOutputSlot(0).SetTensorInfo(inputInfo);
1443  equalLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1444 
1445  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1446  BOOST_CHECK(deserializedNetwork);
1447 
1448  GreaterLayerVerifier verifier(layerName, { inputInfo, inputInfo }, { outputInfo });
1449  deserializedNetwork->Accept(verifier);
1450 }
1451 
1452 BOOST_AUTO_TEST_CASE(EnsureGreaterBackwardCompatibility)
1453 {
1454  // The hex data below is a flat buffer containing a simple network with two inputs,
1455  // an GreaterLayer (now deprecated) and an output
1456  //
1457  // This test verifies that we can still deserialize this old-style model by replacing
1458  // the GreaterLayer with an equivalent ComparisonLayer
1459  const std::vector<uint8_t> greaterModel =
1460  {
1461  0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00,
1462  0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
1463  0xCC, 0x01, 0x00, 0x00, 0x20, 0x01, 0x00, 0x00, 0x70, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x02, 0x00,
1464  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
1465  0x60, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B, 0x04, 0x00, 0x00, 0x00, 0xFE, 0xFE, 0xFF, 0xFF, 0x04, 0x00,
1466  0x00, 0x00, 0x06, 0xFF, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0xEA, 0xFE, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00,
1467  0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00,
1468  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1469  0x64, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xB4, 0xFE, 0xFF, 0xFF, 0x00, 0x00,
1470  0x00, 0x19, 0x04, 0x00, 0x00, 0x00, 0x52, 0xFF, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x36, 0xFF, 0xFF, 0xFF,
1471  0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x1C, 0x00,
1472  0x00, 0x00, 0x07, 0x00, 0x00, 0x00, 0x67, 0x72, 0x65, 0x61, 0x74, 0x65, 0x72, 0x00, 0x02, 0x00, 0x00, 0x00,
1473  0x5C, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x34, 0xFF,
1474  0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x92, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x04, 0x08, 0x00, 0x00, 0x00,
1475  0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00,
1476  0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x08, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00,
1477  0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x00, 0x00,
1478  0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0E, 0x00,
1479  0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00,
1480  0x06, 0x00, 0x08, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0E, 0x00,
1481  0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
1482  0x0E, 0x00, 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x14, 0x00, 0x0E, 0x00, 0x00, 0x00,
1483  0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00,
1484  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
1485  0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00,
1486  0x00, 0x00, 0x66, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1487  0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00,
1488  0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09,
1489  0x04, 0x00, 0x00, 0x00, 0xF6, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x0A, 0x00,
1490  0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x14, 0x00, 0x00, 0x00,
1491  0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00,
1492  0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1493  0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0A, 0x00, 0x00, 0x00,
1494  0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x08, 0x00,
1495  0x07, 0x00, 0x0C, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
1496  0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
1497  0x02, 0x00, 0x00, 0x00
1498  };
1499 
1500  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(std::string(greaterModel.begin(), greaterModel.end()));
1501  BOOST_CHECK(deserializedNetwork);
1502 
1503  const armnn::TensorShape shape{ 1, 2, 2, 2 };
1504 
1507 
1508  GreaterLayerVerifier verifier("greater", { inputInfo, inputInfo }, { outputInfo });
1509  deserializedNetwork->Accept(verifier);
1510 }
1511 
1512 BOOST_AUTO_TEST_CASE(SerializeInstanceNormalization)
1513 {
1515 
1516  const std::string layerName("instanceNormalization");
1517  const armnn::TensorInfo info({ 1, 2, 1, 5 }, armnn::DataType::Float32);
1518 
1520  descriptor.m_Gamma = 1.1f;
1521  descriptor.m_Beta = 0.1f;
1522  descriptor.m_Eps = 0.0001f;
1523  descriptor.m_DataLayout = armnn::DataLayout::NHWC;
1524 
1526  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1527  armnn::IConnectableLayer* const instanceNormLayer =
1528  network->AddInstanceNormalizationLayer(descriptor, layerName.c_str());
1529  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1530 
1531  inputLayer->GetOutputSlot(0).Connect(instanceNormLayer->GetInputSlot(0));
1532  instanceNormLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1533 
1534  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
1535  instanceNormLayer->GetOutputSlot(0).SetTensorInfo(info);
1536 
1537  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1538  BOOST_CHECK(deserializedNetwork);
1539 
1540  InstanceNormalizationLayerVerifier verifier(layerName, {info}, {info}, descriptor);
1541  deserializedNetwork->Accept(verifier);
1542 }
1543 
1545 
1546 BOOST_AUTO_TEST_CASE(SerializeL2Normalization)
1547 {
1548  const std::string l2NormLayerName("l2Normalization");
1549  const armnn::TensorInfo info({1, 2, 1, 5}, armnn::DataType::Float32);
1550 
1553  desc.m_Eps = 0.0001f;
1554 
1556  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
1557  armnn::IConnectableLayer* const l2NormLayer = network->AddL2NormalizationLayer(desc, l2NormLayerName.c_str());
1558  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1559 
1560  inputLayer0->GetOutputSlot(0).Connect(l2NormLayer->GetInputSlot(0));
1561  l2NormLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1562 
1563  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
1564  l2NormLayer->GetOutputSlot(0).SetTensorInfo(info);
1565 
1566  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1567  BOOST_CHECK(deserializedNetwork);
1568 
1569  L2NormalizationLayerVerifier verifier(l2NormLayerName, {info}, {info}, desc);
1570  deserializedNetwork->Accept(verifier);
1571 }
1572 
1573 BOOST_AUTO_TEST_CASE(EnsureL2NormalizationBackwardCompatibility)
1574 {
1575  // The hex data below is a flat buffer containing a simple network with one input
1576  // a L2Normalization layer and an output layer with dimensions as per the tensor infos below.
1577  //
1578  // This test verifies that we can still read back these old style
1579  // models without the normalization epsilon value.
1580  const std::vector<uint8_t> l2NormalizationModel =
1581  {
1582  0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00,
1583  0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
1584  0x3C, 0x01, 0x00, 0x00, 0x74, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
1585  0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0xE8, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B,
1586  0x04, 0x00, 0x00, 0x00, 0xD6, 0xFE, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x08, 0x00,
1587  0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x9E, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00,
1588  0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00,
1589  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1590  0x4C, 0xFF, 0xFF, 0xFF, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x44, 0xFF, 0xFF, 0xFF, 0x00, 0x00,
1591  0x00, 0x20, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00,
1592  0x20, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x06, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00,
1593  0x0E, 0x00, 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x14, 0x00, 0x0E, 0x00, 0x00, 0x00,
1594  0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x1F, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x20, 0x00,
1595  0x00, 0x00, 0x0F, 0x00, 0x00, 0x00, 0x6C, 0x32, 0x4E, 0x6F, 0x72, 0x6D, 0x61, 0x6C, 0x69, 0x7A, 0x61, 0x74,
1596  0x69, 0x6F, 0x6E, 0x00, 0x01, 0x00, 0x00, 0x00, 0x48, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00,
1597  0x00, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
1598  0x52, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00,
1599  0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00,
1600  0x08, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1601  0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09,
1602  0x04, 0x00, 0x00, 0x00, 0xF6, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x0A, 0x00,
1603  0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x14, 0x00, 0x00, 0x00,
1604  0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00,
1605  0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1606  0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0A, 0x00, 0x00, 0x00,
1607  0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x08, 0x00,
1608  0x07, 0x00, 0x0C, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
1609  0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
1610  0x05, 0x00, 0x00, 0x00, 0x00
1611  };
1612 
1613  armnn::INetworkPtr deserializedNetwork =
1614  DeserializeNetwork(std::string(l2NormalizationModel.begin(), l2NormalizationModel.end()));
1615  BOOST_CHECK(deserializedNetwork);
1616 
1617  const std::string layerName("l2Normalization");
1618  const armnn::TensorInfo inputInfo = armnn::TensorInfo({1, 2, 1, 5}, armnn::DataType::Float32);
1619 
1622  // Since this variable does not exist in the l2NormalizationModel dump, the default value will be loaded
1623  desc.m_Eps = 1e-12f;
1624 
1625  L2NormalizationLayerVerifier verifier(layerName, {inputInfo}, {inputInfo}, desc);
1626  deserializedNetwork->Accept(verifier);
1627 }
1628 
1629 BOOST_AUTO_TEST_CASE(SerializeLogSoftmax)
1630 {
1632 
1633  const std::string layerName("log_softmax");
1635 
1636  armnn::LogSoftmaxDescriptor descriptor;
1637  descriptor.m_Beta = 1.0f;
1638  descriptor.m_Axis = -1;
1639 
1641  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1642  armnn::IConnectableLayer* const logSoftmaxLayer = network->AddLogSoftmaxLayer(descriptor, layerName.c_str());
1643  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1644 
1645  inputLayer->GetOutputSlot(0).Connect(logSoftmaxLayer->GetInputSlot(0));
1646  logSoftmaxLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1647 
1648  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
1649  logSoftmaxLayer->GetOutputSlot(0).SetTensorInfo(info);
1650 
1651  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1652  BOOST_CHECK(deserializedNetwork);
1653 
1654  LogSoftmaxLayerVerifier verifier(layerName, {info}, {info}, descriptor);
1655  deserializedNetwork->Accept(verifier);
1656 }
1657 
1658 BOOST_AUTO_TEST_CASE(SerializeMaximum)
1659 {
1661 
1662  const std::string layerName("maximum");
1663  const armnn::TensorInfo info({ 1, 2, 2, 3 }, armnn::DataType::Float32);
1664 
1666  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
1667  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
1668  armnn::IConnectableLayer* const maximumLayer = network->AddMaximumLayer(layerName.c_str());
1669  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1670 
1671  inputLayer0->GetOutputSlot(0).Connect(maximumLayer->GetInputSlot(0));
1672  inputLayer1->GetOutputSlot(0).Connect(maximumLayer->GetInputSlot(1));
1673  maximumLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1674 
1675  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
1676  inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
1677  maximumLayer->GetOutputSlot(0).SetTensorInfo(info);
1678 
1679  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1680  BOOST_CHECK(deserializedNetwork);
1681 
1682  MaximumLayerVerifier verifier(layerName, {info, info}, {info});
1683  deserializedNetwork->Accept(verifier);
1684 }
1685 
1686 BOOST_AUTO_TEST_CASE(SerializeMean)
1687 {
1689 
1690  const std::string layerName("mean");
1691  const armnn::TensorInfo inputInfo({1, 1, 3, 2}, armnn::DataType::Float32);
1692  const armnn::TensorInfo outputInfo({1, 1, 1, 2}, armnn::DataType::Float32);
1693 
1694  armnn::MeanDescriptor descriptor;
1695  descriptor.m_Axis = { 2 };
1696  descriptor.m_KeepDims = true;
1697 
1699  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1700  armnn::IConnectableLayer* const meanLayer = network->AddMeanLayer(descriptor, layerName.c_str());
1701  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1702 
1703  inputLayer->GetOutputSlot(0).Connect(meanLayer->GetInputSlot(0));
1704  meanLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1705 
1706  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
1707  meanLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1708 
1709  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1710  BOOST_CHECK(deserializedNetwork);
1711 
1712  MeanLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, descriptor);
1713  deserializedNetwork->Accept(verifier);
1714 }
1715 
1716 BOOST_AUTO_TEST_CASE(SerializeMerge)
1717 {
1719 
1720  const std::string layerName("merge");
1721  const armnn::TensorInfo info({ 1, 2, 2, 3 }, armnn::DataType::Float32);
1722 
1724  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
1725  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
1726  armnn::IConnectableLayer* const mergeLayer = network->AddMergeLayer(layerName.c_str());
1727  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1728 
1729  inputLayer0->GetOutputSlot(0).Connect(mergeLayer->GetInputSlot(0));
1730  inputLayer1->GetOutputSlot(0).Connect(mergeLayer->GetInputSlot(1));
1731  mergeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1732 
1733  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
1734  inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
1735  mergeLayer->GetOutputSlot(0).SetTensorInfo(info);
1736 
1737  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1738  BOOST_CHECK(deserializedNetwork);
1739 
1740  MergeLayerVerifier verifier(layerName, {info, info}, {info});
1741  deserializedNetwork->Accept(verifier);
1742 }
1743 
1744 class MergerLayerVerifier : public LayerVerifierBaseWithDescriptor<armnn::OriginsDescriptor>
1745 {
1746 public:
1747  MergerLayerVerifier(const std::string& layerName,
1748  const std::vector<armnn::TensorInfo>& inputInfos,
1749  const std::vector<armnn::TensorInfo>& outputInfos,
1750  const armnn::OriginsDescriptor& descriptor)
1751  : LayerVerifierBaseWithDescriptor<armnn::OriginsDescriptor>(layerName, inputInfos, outputInfos, descriptor) {}
1752 
1753  void VisitMergerLayer(const armnn::IConnectableLayer*,
1754  const armnn::OriginsDescriptor&,
1755  const char*) override
1756  {
1757  throw armnn::Exception("MergerLayer should have translated to ConcatLayer");
1758  }
1759 
1760  void VisitConcatLayer(const armnn::IConnectableLayer* layer,
1761  const armnn::OriginsDescriptor& descriptor,
1762  const char* name) override
1763  {
1764  VerifyNameAndConnections(layer, name);
1765  VerifyDescriptor(descriptor);
1766  }
1767 };
1768 
1769 // NOTE: Until the deprecated AddMergerLayer disappears this test checks that calling
1770 // AddMergerLayer places a ConcatLayer into the serialized format and that
1771 // when this deserialises we have a ConcatLayer
1772 BOOST_AUTO_TEST_CASE(SerializeMerger)
1773 {
1774  const std::string layerName("merger");
1775  const armnn::TensorInfo inputInfo = armnn::TensorInfo({2, 3, 2, 2}, armnn::DataType::Float32);
1776  const armnn::TensorInfo outputInfo = armnn::TensorInfo({4, 3, 2, 2}, armnn::DataType::Float32);
1777 
1778  const std::vector<armnn::TensorShape> shapes({inputInfo.GetShape(), inputInfo.GetShape()});
1779 
1780  armnn::OriginsDescriptor descriptor =
1781  armnn::CreateDescriptorForConcatenation(shapes.begin(), shapes.end(), 0);
1782 
1784  armnn::IConnectableLayer* const inputLayerOne = network->AddInputLayer(0);
1785  armnn::IConnectableLayer* const inputLayerTwo = network->AddInputLayer(1);
1787  armnn::IConnectableLayer* const mergerLayer = network->AddMergerLayer(descriptor, layerName.c_str());
1789  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1790 
1791  inputLayerOne->GetOutputSlot(0).Connect(mergerLayer->GetInputSlot(0));
1792  inputLayerTwo->GetOutputSlot(0).Connect(mergerLayer->GetInputSlot(1));
1793  mergerLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1794 
1795  inputLayerOne->GetOutputSlot(0).SetTensorInfo(inputInfo);
1796  inputLayerTwo->GetOutputSlot(0).SetTensorInfo(inputInfo);
1797  mergerLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1798 
1799  std::string mergerLayerNetwork = SerializeNetwork(*network);
1800  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(mergerLayerNetwork);
1801  BOOST_CHECK(deserializedNetwork);
1802 
1803  MergerLayerVerifier verifier(layerName, {inputInfo, inputInfo}, {outputInfo}, descriptor);
1804  deserializedNetwork->Accept(verifier);
1805 }
1806 
1807 BOOST_AUTO_TEST_CASE(EnsureMergerLayerBackwardCompatibility)
1808 {
1809  // The hex data below is a flat buffer containing a simple network with two inputs
1810  // a merger layer (now deprecated) and an output layer with dimensions as per the tensor infos below.
1811  //
1812  // This test verifies that we can still read back these old style
1813  // models replacing the MergerLayers with ConcatLayers with the same parameters.
1814  const std::vector<uint8_t> mergerModel =
1815  {
1816  0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00,
1817  0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
1818  0x38, 0x02, 0x00, 0x00, 0x8C, 0x01, 0x00, 0x00, 0x70, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x02, 0x00,
1819  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
1820  0xF4, 0xFD, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B, 0x04, 0x00, 0x00, 0x00, 0x92, 0xFE, 0xFF, 0xFF, 0x04, 0x00,
1821  0x00, 0x00, 0x9A, 0xFE, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x7E, 0xFE, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00,
1822  0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00,
1823  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1824  0xF8, 0xFE, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x48, 0xFE, 0xFF, 0xFF, 0x00, 0x00,
1825  0x00, 0x1F, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00,
1826  0x68, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00,
1827  0x0C, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
1828  0x02, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x22, 0xFF, 0xFF, 0xFF, 0x04, 0x00,
1829  0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1830  0x00, 0x00, 0x00, 0x00, 0x3E, 0xFF, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00,
1831  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x36, 0xFF, 0xFF, 0xFF,
1832  0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x1E, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x1C, 0x00,
1833  0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x6D, 0x65, 0x72, 0x67, 0x65, 0x72, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
1834  0x5C, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x34, 0xFF,
1835  0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x92, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00,
1836  0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x02, 0x00,
1837  0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x08, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00,
1838  0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x00, 0x00,
1839  0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0E, 0x00,
1840  0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00,
1841  0x06, 0x00, 0x08, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0E, 0x00,
1842  0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
1843  0x0E, 0x00, 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x14, 0x00, 0x0E, 0x00, 0x00, 0x00,
1844  0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00,
1845  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
1846  0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00,
1847  0x00, 0x00, 0x66, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1848  0x04, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00,
1849  0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09,
1850  0x04, 0x00, 0x00, 0x00, 0xF6, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x0A, 0x00,
1851  0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x14, 0x00, 0x00, 0x00,
1852  0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00,
1853  0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1854  0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0A, 0x00, 0x00, 0x00,
1855  0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x08, 0x00,
1856  0x07, 0x00, 0x0C, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
1857  0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
1858  0x02, 0x00, 0x00, 0x00
1859  };
1860 
1861  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(std::string(mergerModel.begin(), mergerModel.end()));
1862  BOOST_CHECK(deserializedNetwork);
1863 
1864  const armnn::TensorInfo inputInfo = armnn::TensorInfo({ 2, 3, 2, 2 }, armnn::DataType::Float32);
1865  const armnn::TensorInfo outputInfo = armnn::TensorInfo({ 4, 3, 2, 2 }, armnn::DataType::Float32);
1866 
1867  const std::vector<armnn::TensorShape> shapes({inputInfo.GetShape(), inputInfo.GetShape()});
1868 
1869  armnn::OriginsDescriptor descriptor =
1870  armnn::CreateDescriptorForConcatenation(shapes.begin(), shapes.end(), 0);
1871 
1872  MergerLayerVerifier verifier("merger", { inputInfo, inputInfo }, { outputInfo }, descriptor);
1873  deserializedNetwork->Accept(verifier);
1874 }
1875 
1876 BOOST_AUTO_TEST_CASE(SerializeConcat)
1877 {
1878  const std::string layerName("concat");
1879  const armnn::TensorInfo inputInfo = armnn::TensorInfo({2, 3, 2, 2}, armnn::DataType::Float32);
1880  const armnn::TensorInfo outputInfo = armnn::TensorInfo({4, 3, 2, 2}, armnn::DataType::Float32);
1881 
1882  const std::vector<armnn::TensorShape> shapes({inputInfo.GetShape(), inputInfo.GetShape()});
1883 
1884  armnn::OriginsDescriptor descriptor =
1885  armnn::CreateDescriptorForConcatenation(shapes.begin(), shapes.end(), 0);
1886 
1888  armnn::IConnectableLayer* const inputLayerOne = network->AddInputLayer(0);
1889  armnn::IConnectableLayer* const inputLayerTwo = network->AddInputLayer(1);
1890  armnn::IConnectableLayer* const concatLayer = network->AddConcatLayer(descriptor, layerName.c_str());
1891  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1892 
1893  inputLayerOne->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(0));
1894  inputLayerTwo->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(1));
1895  concatLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1896 
1897  inputLayerOne->GetOutputSlot(0).SetTensorInfo(inputInfo);
1898  inputLayerTwo->GetOutputSlot(0).SetTensorInfo(inputInfo);
1899  concatLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1900 
1901  std::string concatLayerNetwork = SerializeNetwork(*network);
1902  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(concatLayerNetwork);
1903  BOOST_CHECK(deserializedNetwork);
1904 
1905  // NOTE: using the MergerLayerVerifier to ensure that it is a concat layer and not a
1906  // merger layer that gets placed into the graph.
1907  MergerLayerVerifier verifier(layerName, {inputInfo, inputInfo}, {outputInfo}, descriptor);
1908  deserializedNetwork->Accept(verifier);
1909 }
1910 
1911 BOOST_AUTO_TEST_CASE(SerializeMinimum)
1912 {
1914 
1915  const std::string layerName("minimum");
1916  const armnn::TensorInfo info({ 1, 2, 2, 3 }, armnn::DataType::Float32);
1917 
1919  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
1920  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
1921  armnn::IConnectableLayer* const minimumLayer = network->AddMinimumLayer(layerName.c_str());
1922  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1923 
1924  inputLayer0->GetOutputSlot(0).Connect(minimumLayer->GetInputSlot(0));
1925  inputLayer1->GetOutputSlot(0).Connect(minimumLayer->GetInputSlot(1));
1926  minimumLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1927 
1928  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
1929  inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
1930  minimumLayer->GetOutputSlot(0).SetTensorInfo(info);
1931 
1932  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1933  BOOST_CHECK(deserializedNetwork);
1934 
1935  MinimumLayerVerifier verifier(layerName, {info, info}, {info});
1936  deserializedNetwork->Accept(verifier);
1937 }
1938 
1939 BOOST_AUTO_TEST_CASE(SerializeMultiplication)
1940 {
1942 
1943  const std::string layerName("multiplication");
1944  const armnn::TensorInfo info({ 1, 5, 2, 3 }, armnn::DataType::Float32);
1945 
1947  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
1948  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
1949  armnn::IConnectableLayer* const multiplicationLayer = network->AddMultiplicationLayer(layerName.c_str());
1950  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1951 
1952  inputLayer0->GetOutputSlot(0).Connect(multiplicationLayer->GetInputSlot(0));
1953  inputLayer1->GetOutputSlot(0).Connect(multiplicationLayer->GetInputSlot(1));
1954  multiplicationLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1955 
1956  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
1957  inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
1958  multiplicationLayer->GetOutputSlot(0).SetTensorInfo(info);
1959 
1960  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1961  BOOST_CHECK(deserializedNetwork);
1962 
1963  MultiplicationLayerVerifier verifier(layerName, {info, info}, {info});
1964  deserializedNetwork->Accept(verifier);
1965 }
1966 
1967 BOOST_AUTO_TEST_CASE(SerializePrelu)
1968 {
1970 
1971  const std::string layerName("prelu");
1972 
1973  armnn::TensorInfo inputTensorInfo ({ 4, 1, 2 }, armnn::DataType::Float32);
1974  armnn::TensorInfo alphaTensorInfo ({ 5, 4, 3, 1 }, armnn::DataType::Float32);
1975  armnn::TensorInfo outputTensorInfo({ 5, 4, 3, 2 }, armnn::DataType::Float32);
1976 
1978  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
1979  armnn::IConnectableLayer* const alphaLayer = network->AddInputLayer(1);
1980  armnn::IConnectableLayer* const preluLayer = network->AddPreluLayer(layerName.c_str());
1981  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
1982 
1983  inputLayer->GetOutputSlot(0).Connect(preluLayer->GetInputSlot(0));
1984  alphaLayer->GetOutputSlot(0).Connect(preluLayer->GetInputSlot(1));
1985  preluLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1986 
1987  inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
1988  alphaLayer->GetOutputSlot(0).SetTensorInfo(alphaTensorInfo);
1989  preluLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1990 
1991  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
1992  BOOST_CHECK(deserializedNetwork);
1993 
1994  PreluLayerVerifier verifier(layerName, {inputTensorInfo, alphaTensorInfo}, {outputTensorInfo});
1995  deserializedNetwork->Accept(verifier);
1996 }
1997 
1998 BOOST_AUTO_TEST_CASE(SerializeNormalization)
1999 {
2001 
2002  const std::string layerName("normalization");
2003  const armnn::TensorInfo info({2, 1, 2, 2}, armnn::DataType::Float32);
2004 
2007  desc.m_NormSize = 3;
2008  desc.m_Alpha = 1;
2009  desc.m_Beta = 1;
2010  desc.m_K = 1;
2011 
2013  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2014  armnn::IConnectableLayer* const normalizationLayer = network->AddNormalizationLayer(desc, layerName.c_str());
2015  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2016 
2017  inputLayer->GetOutputSlot(0).Connect(normalizationLayer->GetInputSlot(0));
2018  normalizationLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2019 
2020  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
2021  normalizationLayer->GetOutputSlot(0).SetTensorInfo(info);
2022 
2023  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2024  BOOST_CHECK(deserializedNetwork);
2025 
2026  NormalizationLayerVerifier verifier(layerName, {info}, {info}, desc);
2027  deserializedNetwork->Accept(verifier);
2028 }
2029 
2031 
2033 {
2034  const std::string layerName("pad");
2035  const armnn::TensorInfo inputTensorInfo = armnn::TensorInfo({1, 2, 3, 4}, armnn::DataType::Float32);
2036  const armnn::TensorInfo outputTensorInfo = armnn::TensorInfo({1, 3, 5, 7}, armnn::DataType::Float32);
2037 
2038  armnn::PadDescriptor desc({{0, 0}, {1, 0}, {1, 1}, {1, 2}});
2039 
2041  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2042  armnn::IConnectableLayer* const padLayer = network->AddPadLayer(desc, layerName.c_str());
2043  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2044 
2045  inputLayer->GetOutputSlot(0).Connect(padLayer->GetInputSlot(0));
2046  padLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2047 
2048  inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
2049  padLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2050 
2051  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2052  BOOST_CHECK(deserializedNetwork);
2053 
2054  PadLayerVerifier verifier(layerName, {inputTensorInfo}, {outputTensorInfo}, desc);
2055  deserializedNetwork->Accept(verifier);
2056 }
2057 
2058 BOOST_AUTO_TEST_CASE(EnsurePadBackwardCompatibility)
2059 {
2060  // The PadDescriptor is being extended with a float PadValue (so a value other than 0
2061  // can be used to pad the tensor.
2062  //
2063  // This test contains a binary representation of a simple input->pad->output network
2064  // prior to this change to test that the descriptor has been updated in a backward
2065  // compatible way with respect to Deserialization of older binary dumps
2066  const std::vector<uint8_t> padModel =
2067  {
2068  0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00,
2069  0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
2070  0x54, 0x01, 0x00, 0x00, 0x6C, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
2071  0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0xD0, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B,
2072  0x04, 0x00, 0x00, 0x00, 0x96, 0xFF, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x9E, 0xFF, 0xFF, 0xFF, 0x04, 0x00,
2073  0x00, 0x00, 0x72, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
2074  0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00,
2075  0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x2C, 0xFF, 0xFF, 0xFF, 0x01, 0x00, 0x00, 0x00,
2076  0x00, 0x00, 0x00, 0x00, 0x24, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x16, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00,
2077  0x0E, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x4C, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00,
2078  0x00, 0x00, 0x06, 0x00, 0x08, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x08, 0x00,
2079  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
2080  0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00,
2081  0x0E, 0x00, 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x14, 0x00, 0x0E, 0x00, 0x00, 0x00,
2082  0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00,
2083  0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x70, 0x61, 0x64, 0x00, 0x01, 0x00, 0x00, 0x00, 0x48, 0x00, 0x00, 0x00,
2084  0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00,
2085  0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x52, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00,
2086  0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x05, 0x00,
2087  0x00, 0x00, 0x07, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00,
2088  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00,
2089  0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x04, 0x00, 0x00, 0x00, 0xF6, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00,
2090  0x00, 0x00, 0x06, 0x00, 0x0A, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00,
2091  0x0E, 0x00, 0x14, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x0E, 0x00, 0x00, 0x00,
2092  0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00,
2093  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00,
2094  0x08, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00,
2095  0x0A, 0x00, 0x10, 0x00, 0x08, 0x00, 0x07, 0x00, 0x0C, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
2096  0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00,
2097  0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00
2098  };
2099 
2100  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(std::string(padModel.begin(), padModel.end()));
2101  BOOST_CHECK(deserializedNetwork);
2102 
2103  const armnn::TensorInfo inputInfo = armnn::TensorInfo({ 1, 2, 3, 4 }, armnn::DataType::Float32);
2104  const armnn::TensorInfo outputInfo = armnn::TensorInfo({ 1, 3, 5, 7 }, armnn::DataType::Float32);
2105 
2106  armnn::PadDescriptor descriptor({{ 0, 0 }, { 1, 0 }, { 1, 1 }, { 1, 2 }});
2107 
2108  PadLayerVerifier verifier("pad", { inputInfo }, { outputInfo }, descriptor);
2109  deserializedNetwork->Accept(verifier);
2110 }
2111 
2112 BOOST_AUTO_TEST_CASE(SerializePermute)
2113 {
2115 
2116  const std::string layerName("permute");
2117  const armnn::TensorInfo inputTensorInfo({4, 3, 2, 1}, armnn::DataType::Float32);
2118  const armnn::TensorInfo outputTensorInfo({1, 2, 3, 4}, armnn::DataType::Float32);
2119 
2120  armnn::PermuteDescriptor descriptor(armnn::PermutationVector({3, 2, 1, 0}));
2121 
2123  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2124  armnn::IConnectableLayer* const permuteLayer = network->AddPermuteLayer(descriptor, layerName.c_str());
2125  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2126 
2127  inputLayer->GetOutputSlot(0).Connect(permuteLayer->GetInputSlot(0));
2128  permuteLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2129 
2130  inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
2131  permuteLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2132 
2133  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2134  BOOST_CHECK(deserializedNetwork);
2135 
2136  PermuteLayerVerifier verifier(layerName, {inputTensorInfo}, {outputTensorInfo}, descriptor);
2137  deserializedNetwork->Accept(verifier);
2138 }
2139 
2140 BOOST_AUTO_TEST_CASE(SerializePooling2d)
2141 {
2143 
2144  const std::string layerName("pooling2d");
2145  const armnn::TensorInfo inputInfo({1, 2, 2, 1}, armnn::DataType::Float32);
2146  const armnn::TensorInfo outputInfo({1, 1, 1, 1}, armnn::DataType::Float32);
2147 
2150  desc.m_PadTop = 0;
2151  desc.m_PadBottom = 0;
2152  desc.m_PadLeft = 0;
2153  desc.m_PadRight = 0;
2154  desc.m_PoolType = armnn::PoolingAlgorithm::Average;
2155  desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
2156  desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
2157  desc.m_PoolHeight = 2;
2158  desc.m_PoolWidth = 2;
2159  desc.m_StrideX = 2;
2160  desc.m_StrideY = 2;
2161 
2163  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2164  armnn::IConnectableLayer* const pooling2dLayer = network->AddPooling2dLayer(desc, layerName.c_str());
2165  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2166 
2167  inputLayer->GetOutputSlot(0).Connect(pooling2dLayer->GetInputSlot(0));
2168  pooling2dLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2169 
2170  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2171  pooling2dLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2172 
2173  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2174  BOOST_CHECK(deserializedNetwork);
2175 
2176  Pooling2dLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, desc);
2177  deserializedNetwork->Accept(verifier);
2178 }
2179 
2180 BOOST_AUTO_TEST_CASE(SerializeQuantize)
2181 {
2183 
2184  const std::string layerName("quantize");
2185  const armnn::TensorInfo info({ 1, 2, 2, 3 }, armnn::DataType::Float32);
2186 
2188  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2189  armnn::IConnectableLayer* const quantizeLayer = network->AddQuantizeLayer(layerName.c_str());
2190  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2191 
2192  inputLayer->GetOutputSlot(0).Connect(quantizeLayer->GetInputSlot(0));
2193  quantizeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2194 
2195  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
2196  quantizeLayer->GetOutputSlot(0).SetTensorInfo(info);
2197 
2198  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2199  BOOST_CHECK(deserializedNetwork);
2200 
2201  QuantizeLayerVerifier verifier(layerName, {info}, {info});
2202  deserializedNetwork->Accept(verifier);
2203 }
2204 
2205 BOOST_AUTO_TEST_CASE(SerializeRank)
2206 {
2208 
2209  const std::string layerName("rank");
2210  const armnn::TensorInfo inputInfo({1, 9}, armnn::DataType::Float32);
2211  const armnn::TensorInfo outputInfo({1}, armnn::DataType::Signed32);
2212 
2214  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2215  armnn::IConnectableLayer* const rankLayer = network->AddRankLayer(layerName.c_str());
2216  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2217 
2218  inputLayer->GetOutputSlot(0).Connect(rankLayer->GetInputSlot(0));
2219  rankLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2220 
2221  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2222  rankLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2223 
2224  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2225  BOOST_CHECK(deserializedNetwork);
2226 
2227  RankLayerVerifier verifier(layerName, {inputInfo}, {outputInfo});
2228  deserializedNetwork->Accept(verifier);
2229 }
2230 
2231 BOOST_AUTO_TEST_CASE(SerializeReshape)
2232 {
2234 
2235  const std::string layerName("reshape");
2236  const armnn::TensorInfo inputInfo({1, 9}, armnn::DataType::Float32);
2237  const armnn::TensorInfo outputInfo({3, 3}, armnn::DataType::Float32);
2238 
2239  armnn::ReshapeDescriptor descriptor({3, 3});
2240 
2242  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2243  armnn::IConnectableLayer* const reshapeLayer = network->AddReshapeLayer(descriptor, layerName.c_str());
2244  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2245 
2246  inputLayer->GetOutputSlot(0).Connect(reshapeLayer->GetInputSlot(0));
2247  reshapeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2248 
2249  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2250  reshapeLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2251 
2252  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2253  BOOST_CHECK(deserializedNetwork);
2254 
2255  ReshapeLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, descriptor);
2256  deserializedNetwork->Accept(verifier);
2257 }
2258 
2259 BOOST_AUTO_TEST_CASE(SerializeResize)
2260 {
2262 
2263  const std::string layerName("resize");
2264  const armnn::TensorInfo inputInfo = armnn::TensorInfo({1, 3, 5, 5}, armnn::DataType::Float32);
2265  const armnn::TensorInfo outputInfo = armnn::TensorInfo({1, 3, 2, 4}, armnn::DataType::Float32);
2266 
2268  desc.m_TargetWidth = 4;
2269  desc.m_TargetHeight = 2;
2270  desc.m_Method = armnn::ResizeMethod::NearestNeighbor;
2271  desc.m_AlignCorners = true;
2272  desc.m_HalfPixelCenters = true;
2273 
2275  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2276  armnn::IConnectableLayer* const resizeLayer = network->AddResizeLayer(desc, layerName.c_str());
2277  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2278 
2279  inputLayer->GetOutputSlot(0).Connect(resizeLayer->GetInputSlot(0));
2280  resizeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2281 
2282  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2283  resizeLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2284 
2285  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2286  BOOST_CHECK(deserializedNetwork);
2287 
2288  ResizeLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, desc);
2289  deserializedNetwork->Accept(verifier);
2290 }
2291 
2292 class ResizeBilinearLayerVerifier : public LayerVerifierBaseWithDescriptor<armnn::ResizeBilinearDescriptor>
2293 {
2294 public:
2295  ResizeBilinearLayerVerifier(const std::string& layerName,
2296  const std::vector<armnn::TensorInfo>& inputInfos,
2297  const std::vector<armnn::TensorInfo>& outputInfos,
2298  const armnn::ResizeBilinearDescriptor& descriptor)
2299  : LayerVerifierBaseWithDescriptor<armnn::ResizeBilinearDescriptor>(
2300  layerName, inputInfos, outputInfos, descriptor) {}
2301 
2302  void VisitResizeLayer(const armnn::IConnectableLayer* layer,
2303  const armnn::ResizeDescriptor& descriptor,
2304  const char* name) override
2305  {
2306  VerifyNameAndConnections(layer, name);
2307 
2308  BOOST_CHECK(descriptor.m_Method == armnn::ResizeMethod::Bilinear);
2309  BOOST_CHECK(descriptor.m_TargetWidth == m_Descriptor.m_TargetWidth);
2310  BOOST_CHECK(descriptor.m_TargetHeight == m_Descriptor.m_TargetHeight);
2311  BOOST_CHECK(descriptor.m_DataLayout == m_Descriptor.m_DataLayout);
2312  BOOST_CHECK(descriptor.m_AlignCorners == m_Descriptor.m_AlignCorners);
2313  BOOST_CHECK(descriptor.m_HalfPixelCenters == m_Descriptor.m_HalfPixelCenters);
2314  }
2315 
2316  void VisitResizeBilinearLayer(const armnn::IConnectableLayer*,
2318  const char*) override
2319  {
2320  throw armnn::Exception("ResizeBilinearLayer should have translated to ResizeLayer");
2321  }
2322 };
2323 
2324 // NOTE: Until the deprecated AddResizeBilinearLayer disappears this test checks that
2325 // calling AddResizeBilinearLayer places a ResizeLayer into the serialized format
2326 // and that when this deserialises we have a ResizeLayer
2327 BOOST_AUTO_TEST_CASE(SerializeResizeBilinear)
2328 {
2329  const std::string layerName("resizeBilinear");
2330  const armnn::TensorInfo inputInfo = armnn::TensorInfo({1, 3, 5, 5}, armnn::DataType::Float32);
2331  const armnn::TensorInfo outputInfo = armnn::TensorInfo({1, 3, 2, 4}, armnn::DataType::Float32);
2332 
2334  desc.m_TargetWidth = 4u;
2335  desc.m_TargetHeight = 2u;
2336  desc.m_AlignCorners = true;
2337  desc.m_HalfPixelCenters = true;
2338 
2340  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2342  armnn::IConnectableLayer* const resizeLayer = network->AddResizeBilinearLayer(desc, layerName.c_str());
2344  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2345 
2346  inputLayer->GetOutputSlot(0).Connect(resizeLayer->GetInputSlot(0));
2347  resizeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2348 
2349  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2350  resizeLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2351 
2352  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2353  BOOST_CHECK(deserializedNetwork);
2354 
2355  ResizeBilinearLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, desc);
2356  deserializedNetwork->Accept(verifier);
2357 }
2358 
2359 BOOST_AUTO_TEST_CASE(EnsureResizeBilinearBackwardCompatibility)
2360 {
2361  // The hex data below is a flat buffer containing a simple network with an input,
2362  // a ResizeBilinearLayer (now deprecated) and an output
2363  //
2364  // This test verifies that we can still deserialize this old-style model by replacing
2365  // the ResizeBilinearLayer with an equivalent ResizeLayer
2366  const std::vector<uint8_t> resizeBilinearModel =
2367  {
2368  0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00,
2369  0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
2370  0x50, 0x01, 0x00, 0x00, 0x74, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
2371  0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0xD4, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B,
2372  0x04, 0x00, 0x00, 0x00, 0xC2, 0xFE, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x08, 0x00,
2373  0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x8A, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00,
2374  0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00,
2375  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
2376  0x38, 0xFF, 0xFF, 0xFF, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x30, 0xFF, 0xFF, 0xFF, 0x00, 0x00,
2377  0x00, 0x1A, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0E, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00,
2378  0x34, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x12, 0x00, 0x08, 0x00, 0x0C, 0x00,
2379  0x07, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
2380  0x00, 0x00, 0x0E, 0x00, 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x14, 0x00, 0x0E, 0x00,
2381  0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x19, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00,
2382  0x20, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x72, 0x65, 0x73, 0x69, 0x7A, 0x65, 0x42, 0x69, 0x6C, 0x69,
2383  0x6E, 0x65, 0x61, 0x72, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x48, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
2384  0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00,
2385  0x00, 0x00, 0x52, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
2386  0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x04, 0x00,
2387  0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
2388  0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
2389  0x00, 0x09, 0x04, 0x00, 0x00, 0x00, 0xF6, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00,
2390  0x0A, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x14, 0x00,
2391  0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,
2392  0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
2393  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0A, 0x00,
2394  0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00,
2395  0x08, 0x00, 0x07, 0x00, 0x0C, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00,
2396  0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x05, 0x00,
2397  0x00, 0x00, 0x05, 0x00, 0x00, 0x00
2398  };
2399 
2400  armnn::INetworkPtr deserializedNetwork =
2401  DeserializeNetwork(std::string(resizeBilinearModel.begin(), resizeBilinearModel.end()));
2402  BOOST_CHECK(deserializedNetwork);
2403 
2404  const armnn::TensorInfo inputInfo = armnn::TensorInfo({1, 3, 5, 5}, armnn::DataType::Float32);
2405  const armnn::TensorInfo outputInfo = armnn::TensorInfo({1, 3, 2, 4}, armnn::DataType::Float32);
2406 
2408  descriptor.m_TargetWidth = 4u;
2409  descriptor.m_TargetHeight = 2u;
2410 
2411  ResizeBilinearLayerVerifier verifier("resizeBilinear", { inputInfo }, { outputInfo }, descriptor);
2412  deserializedNetwork->Accept(verifier);
2413 }
2414 
2415 BOOST_AUTO_TEST_CASE(SerializeSlice)
2416 {
2418 
2419  const std::string layerName{"slice"};
2420 
2421  const armnn::TensorInfo inputInfo = armnn::TensorInfo({3, 2, 3, 1}, armnn::DataType::Float32);
2422  const armnn::TensorInfo outputInfo = armnn::TensorInfo({2, 2, 2, 1}, armnn::DataType::Float32);
2423 
2424  armnn::SliceDescriptor descriptor({ 0, 0, 1, 0}, {2, 2, 2, 1});
2425 
2427 
2428  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2429  armnn::IConnectableLayer* const sliceLayer = network->AddSliceLayer(descriptor, layerName.c_str());
2430  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2431 
2432  inputLayer->GetOutputSlot(0).Connect(sliceLayer->GetInputSlot(0));
2433  sliceLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2434 
2435  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2436  sliceLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2437 
2438  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2439  BOOST_CHECK(deserializedNetwork);
2440 
2441  SliceLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, descriptor);
2442  deserializedNetwork->Accept(verifier);
2443 }
2444 
2445 BOOST_AUTO_TEST_CASE(SerializeSoftmax)
2446 {
2448 
2449  const std::string layerName("softmax");
2451 
2452  armnn::SoftmaxDescriptor descriptor;
2453  descriptor.m_Beta = 1.0f;
2454 
2456  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2457  armnn::IConnectableLayer* const softmaxLayer = network->AddSoftmaxLayer(descriptor, layerName.c_str());
2458  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2459 
2460  inputLayer->GetOutputSlot(0).Connect(softmaxLayer->GetInputSlot(0));
2461  softmaxLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2462 
2463  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
2464  softmaxLayer->GetOutputSlot(0).SetTensorInfo(info);
2465 
2466  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2467  BOOST_CHECK(deserializedNetwork);
2468 
2469  SoftmaxLayerVerifier verifier(layerName, {info}, {info}, descriptor);
2470  deserializedNetwork->Accept(verifier);
2471 }
2472 
2473 BOOST_AUTO_TEST_CASE(SerializeSpaceToBatchNd)
2474 {
2476 
2477  const std::string layerName("spaceToBatchNd");
2478  const armnn::TensorInfo inputInfo({2, 1, 2, 4}, armnn::DataType::Float32);
2479  const armnn::TensorInfo outputInfo({8, 1, 1, 3}, armnn::DataType::Float32);
2480 
2483  desc.m_BlockShape = {2, 2};
2484  desc.m_PadList = {{0, 0}, {2, 0}};
2485 
2487  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2488  armnn::IConnectableLayer* const spaceToBatchNdLayer = network->AddSpaceToBatchNdLayer(desc, layerName.c_str());
2489  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2490 
2491  inputLayer->GetOutputSlot(0).Connect(spaceToBatchNdLayer->GetInputSlot(0));
2492  spaceToBatchNdLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2493 
2494  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2495  spaceToBatchNdLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2496 
2497  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2498  BOOST_CHECK(deserializedNetwork);
2499 
2500  SpaceToBatchNdLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, desc);
2501  deserializedNetwork->Accept(verifier);
2502 }
2503 
2504 BOOST_AUTO_TEST_CASE(SerializeSpaceToDepth)
2505 {
2507 
2508  const std::string layerName("spaceToDepth");
2509 
2510  const armnn::TensorInfo inputInfo ({ 1, 16, 8, 3 }, armnn::DataType::Float32);
2511  const armnn::TensorInfo outputInfo({ 1, 8, 4, 12 }, armnn::DataType::Float32);
2512 
2514  desc.m_BlockSize = 2;
2515  desc.m_DataLayout = armnn::DataLayout::NHWC;
2516 
2518  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2519  armnn::IConnectableLayer* const spaceToDepthLayer = network->AddSpaceToDepthLayer(desc, layerName.c_str());
2520  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2521 
2522  inputLayer->GetOutputSlot(0).Connect(spaceToDepthLayer->GetInputSlot(0));
2523  spaceToDepthLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2524 
2525  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2526  spaceToDepthLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2527 
2528  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2529  BOOST_CHECK(deserializedNetwork);
2530 
2531  SpaceToDepthLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, desc);
2532  deserializedNetwork->Accept(verifier);
2533 }
2534 
2535 BOOST_AUTO_TEST_CASE(SerializeSplitter)
2536 {
2538 
2539  const unsigned int numViews = 3;
2540  const unsigned int numDimensions = 4;
2541  const unsigned int inputShape[] = {1, 18, 4, 4};
2542  const unsigned int outputShape[] = {1, 6, 4, 4};
2543 
2544  // This is modelled on how the caffe parser sets up a splitter layer to partition an input along dimension one.
2545  unsigned int splitterDimSizes[4] = {static_cast<unsigned int>(inputShape[0]),
2546  static_cast<unsigned int>(inputShape[1]),
2547  static_cast<unsigned int>(inputShape[2]),
2548  static_cast<unsigned int>(inputShape[3])};
2549  splitterDimSizes[1] /= numViews;
2550  armnn::ViewsDescriptor desc(numViews, numDimensions);
2551 
2552  for (unsigned int g = 0; g < numViews; ++g)
2553  {
2554  desc.SetViewOriginCoord(g, 1, splitterDimSizes[1] * g);
2555 
2556  for (unsigned int dimIdx=0; dimIdx < 4; dimIdx++)
2557  {
2558  desc.SetViewSize(g, dimIdx, splitterDimSizes[dimIdx]);
2559  }
2560  }
2561 
2562  const std::string layerName("splitter");
2563  const armnn::TensorInfo inputInfo(numDimensions, inputShape, armnn::DataType::Float32);
2564  const armnn::TensorInfo outputInfo(numDimensions, outputShape, armnn::DataType::Float32);
2565 
2567  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2568  armnn::IConnectableLayer* const splitterLayer = network->AddSplitterLayer(desc, layerName.c_str());
2569  armnn::IConnectableLayer* const outputLayer0 = network->AddOutputLayer(0);
2570  armnn::IConnectableLayer* const outputLayer1 = network->AddOutputLayer(1);
2571  armnn::IConnectableLayer* const outputLayer2 = network->AddOutputLayer(2);
2572 
2573  inputLayer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0));
2574  splitterLayer->GetOutputSlot(0).Connect(outputLayer0->GetInputSlot(0));
2575  splitterLayer->GetOutputSlot(1).Connect(outputLayer1->GetInputSlot(0));
2576  splitterLayer->GetOutputSlot(2).Connect(outputLayer2->GetInputSlot(0));
2577 
2578  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2579  splitterLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2580  splitterLayer->GetOutputSlot(1).SetTensorInfo(outputInfo);
2581  splitterLayer->GetOutputSlot(2).SetTensorInfo(outputInfo);
2582 
2583  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2584  BOOST_CHECK(deserializedNetwork);
2585 
2586  SplitterLayerVerifier verifier(layerName, {inputInfo}, {outputInfo, outputInfo, outputInfo}, desc);
2587  deserializedNetwork->Accept(verifier);
2588 }
2589 
2590 BOOST_AUTO_TEST_CASE(SerializeStack)
2591 {
2593 
2594  const std::string layerName("stack");
2595 
2596  armnn::TensorInfo inputTensorInfo ({4, 3, 5}, armnn::DataType::Float32);
2597  armnn::TensorInfo outputTensorInfo({4, 3, 2, 5}, armnn::DataType::Float32);
2598 
2599  armnn::StackDescriptor descriptor(2, 2, {4, 3, 5});
2600 
2602  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(0);
2603  armnn::IConnectableLayer* const inputLayer2 = network->AddInputLayer(1);
2604  armnn::IConnectableLayer* const stackLayer = network->AddStackLayer(descriptor, layerName.c_str());
2605  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2606 
2607  inputLayer1->GetOutputSlot(0).Connect(stackLayer->GetInputSlot(0));
2608  inputLayer2->GetOutputSlot(0).Connect(stackLayer->GetInputSlot(1));
2609  stackLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2610 
2611  inputLayer1->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
2612  inputLayer2->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
2613  stackLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2614 
2615  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2616  BOOST_CHECK(deserializedNetwork);
2617 
2618  StackLayerVerifier verifier(layerName, {inputTensorInfo, inputTensorInfo}, {outputTensorInfo}, descriptor);
2619  deserializedNetwork->Accept(verifier);
2620 }
2621 
2622 BOOST_AUTO_TEST_CASE(SerializeStandIn)
2623 {
2625 
2626  const std::string layerName("standIn");
2627 
2628  armnn::TensorInfo tensorInfo({ 1u }, armnn::DataType::Float32);
2629  armnn::StandInDescriptor descriptor(2u, 2u);
2630 
2632  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
2633  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
2634  armnn::IConnectableLayer* const standInLayer = network->AddStandInLayer(descriptor, layerName.c_str());
2635  armnn::IConnectableLayer* const outputLayer0 = network->AddOutputLayer(0);
2636  armnn::IConnectableLayer* const outputLayer1 = network->AddOutputLayer(1);
2637 
2638  inputLayer0->GetOutputSlot(0).Connect(standInLayer->GetInputSlot(0));
2639  inputLayer0->GetOutputSlot(0).SetTensorInfo(tensorInfo);
2640 
2641  inputLayer1->GetOutputSlot(0).Connect(standInLayer->GetInputSlot(1));
2642  inputLayer1->GetOutputSlot(0).SetTensorInfo(tensorInfo);
2643 
2644  standInLayer->GetOutputSlot(0).Connect(outputLayer0->GetInputSlot(0));
2645  standInLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
2646 
2647  standInLayer->GetOutputSlot(1).Connect(outputLayer1->GetInputSlot(0));
2648  standInLayer->GetOutputSlot(1).SetTensorInfo(tensorInfo);
2649 
2650  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2651  BOOST_CHECK(deserializedNetwork);
2652 
2653  StandInLayerVerifier verifier(layerName, { tensorInfo, tensorInfo }, { tensorInfo, tensorInfo }, descriptor);
2654  deserializedNetwork->Accept(verifier);
2655 }
2656 
2657 BOOST_AUTO_TEST_CASE(SerializeStridedSlice)
2658 {
2660 
2661  const std::string layerName("stridedSlice");
2662  const armnn::TensorInfo inputInfo = armnn::TensorInfo({3, 2, 3, 1}, armnn::DataType::Float32);
2663  const armnn::TensorInfo outputInfo = armnn::TensorInfo({3, 1}, armnn::DataType::Float32);
2664 
2665  armnn::StridedSliceDescriptor desc({0, 0, 1, 0}, {1, 1, 1, 1}, {1, 1, 1, 1});
2666  desc.m_EndMask = (1 << 4) - 1;
2667  desc.m_ShrinkAxisMask = (1 << 1) | (1 << 2);
2668  desc.m_DataLayout = armnn::DataLayout::NCHW;
2669 
2671  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2672  armnn::IConnectableLayer* const stridedSliceLayer = network->AddStridedSliceLayer(desc, layerName.c_str());
2673  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2674 
2675  inputLayer->GetOutputSlot(0).Connect(stridedSliceLayer->GetInputSlot(0));
2676  stridedSliceLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2677 
2678  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2679  stridedSliceLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2680 
2681  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2682  BOOST_CHECK(deserializedNetwork);
2683 
2684  StridedSliceLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, desc);
2685  deserializedNetwork->Accept(verifier);
2686 }
2687 
2688 BOOST_AUTO_TEST_CASE(SerializeSubtraction)
2689 {
2691 
2692  const std::string layerName("subtraction");
2694 
2696  armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
2697  armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
2698  armnn::IConnectableLayer* const subtractionLayer = network->AddSubtractionLayer(layerName.c_str());
2699  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2700 
2701  inputLayer0->GetOutputSlot(0).Connect(subtractionLayer->GetInputSlot(0));
2702  inputLayer1->GetOutputSlot(0).Connect(subtractionLayer->GetInputSlot(1));
2703  subtractionLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2704 
2705  inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
2706  inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
2707  subtractionLayer->GetOutputSlot(0).SetTensorInfo(info);
2708 
2709  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2710  BOOST_CHECK(deserializedNetwork);
2711 
2712  SubtractionLayerVerifier verifier(layerName, {info, info}, {info});
2713  deserializedNetwork->Accept(verifier);
2714 }
2715 
2716 BOOST_AUTO_TEST_CASE(SerializeSwitch)
2717 {
2718  class SwitchLayerVerifier : public LayerVerifierBase
2719  {
2720  public:
2721  SwitchLayerVerifier(const std::string& layerName,
2722  const std::vector<armnn::TensorInfo>& inputInfos,
2723  const std::vector<armnn::TensorInfo>& outputInfos)
2724  : LayerVerifierBase(layerName, inputInfos, outputInfos) {}
2725 
2726  void VisitSwitchLayer(const armnn::IConnectableLayer* layer, const char* name) override
2727  {
2728  VerifyNameAndConnections(layer, name);
2729  }
2730 
2731  void VisitConstantLayer(const armnn::IConnectableLayer*,
2732  const armnn::ConstTensor&,
2733  const char*) override {}
2734  };
2735 
2736  const std::string layerName("switch");
2738 
2739  std::vector<float> constantData = GenerateRandomData<float>(info.GetNumElements());
2740  armnn::ConstTensor constTensor(info, constantData);
2741 
2743  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2744  armnn::IConnectableLayer* const constantLayer = network->AddConstantLayer(constTensor, "constant");
2745  armnn::IConnectableLayer* const switchLayer = network->AddSwitchLayer(layerName.c_str());
2746  armnn::IConnectableLayer* const trueOutputLayer = network->AddOutputLayer(0);
2747  armnn::IConnectableLayer* const falseOutputLayer = network->AddOutputLayer(1);
2748 
2749  inputLayer->GetOutputSlot(0).Connect(switchLayer->GetInputSlot(0));
2750  constantLayer->GetOutputSlot(0).Connect(switchLayer->GetInputSlot(1));
2751  switchLayer->GetOutputSlot(0).Connect(trueOutputLayer->GetInputSlot(0));
2752  switchLayer->GetOutputSlot(1).Connect(falseOutputLayer->GetInputSlot(0));
2753 
2754  inputLayer->GetOutputSlot(0).SetTensorInfo(info);
2755  constantLayer->GetOutputSlot(0).SetTensorInfo(info);
2756  switchLayer->GetOutputSlot(0).SetTensorInfo(info);
2757  switchLayer->GetOutputSlot(1).SetTensorInfo(info);
2758 
2759  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2760  BOOST_CHECK(deserializedNetwork);
2761 
2762  SwitchLayerVerifier verifier(layerName, {info, info}, {info, info});
2763  deserializedNetwork->Accept(verifier);
2764 }
2765 
2766 BOOST_AUTO_TEST_CASE(SerializeTranspose)
2767 {
2769 
2770  const std::string layerName("transpose");
2771  const armnn::TensorInfo inputTensorInfo({4, 3, 2, 1}, armnn::DataType::Float32);
2772  const armnn::TensorInfo outputTensorInfo({1, 2, 3, 4}, armnn::DataType::Float32);
2773 
2774  armnn::TransposeDescriptor descriptor(armnn::PermutationVector({3, 2, 1, 0}));
2775 
2777  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2778  armnn::IConnectableLayer* const transposeLayer = network->AddTransposeLayer(descriptor, layerName.c_str());
2779  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2780 
2781  inputLayer->GetOutputSlot(0).Connect(transposeLayer->GetInputSlot(0));
2782  transposeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2783 
2784  inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
2785  transposeLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2786 
2787  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2788  BOOST_CHECK(deserializedNetwork);
2789 
2790  TransposeLayerVerifier verifier(layerName, {inputTensorInfo}, {outputTensorInfo}, descriptor);
2791  deserializedNetwork->Accept(verifier);
2792 }
2793 
2794 BOOST_AUTO_TEST_CASE(SerializeTransposeConvolution2d)
2795 {
2796  using Descriptor = armnn::TransposeConvolution2dDescriptor;
2797  class TransposeConvolution2dLayerVerifier : public LayerVerifierBaseWithDescriptor<Descriptor>
2798  {
2799  public:
2800  TransposeConvolution2dLayerVerifier(const std::string& layerName,
2801  const std::vector<armnn::TensorInfo>& inputInfos,
2802  const std::vector<armnn::TensorInfo>& outputInfos,
2803  const Descriptor& descriptor,
2804  const armnn::ConstTensor& weights,
2806  : LayerVerifierBaseWithDescriptor<Descriptor>(layerName, inputInfos, outputInfos, descriptor)
2807  , m_Weights(weights)
2808  , m_Biases(biases)
2809  {}
2810 
2811  void VisitTransposeConvolution2dLayer(const armnn::IConnectableLayer* layer,
2812  const Descriptor& descriptor,
2813  const armnn::ConstTensor& weights,
2815  const char* name) override
2816  {
2817  VerifyNameAndConnections(layer, name);
2818  VerifyDescriptor(descriptor);
2819 
2820  // check weights
2821  CompareConstTensor(weights, m_Weights);
2822 
2823  // check biases
2824  BOOST_CHECK(biases.has_value() == descriptor.m_BiasEnabled);
2825  BOOST_CHECK(biases.has_value() == m_Biases.has_value());
2826 
2827  if (biases.has_value() && m_Biases.has_value())
2828  {
2829  CompareConstTensor(biases.value(), m_Biases.value());
2830  }
2831  }
2832 
2833  private:
2834  armnn::ConstTensor m_Weights;
2836  };
2837 
2838  const std::string layerName("transposeConvolution2d");
2839  const armnn::TensorInfo inputInfo ({ 1, 7, 7, 1 }, armnn::DataType::Float32);
2840  const armnn::TensorInfo outputInfo({ 1, 9, 9, 1 }, armnn::DataType::Float32);
2841 
2842  const armnn::TensorInfo weightsInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32);
2843  const armnn::TensorInfo biasesInfo ({ 1 }, armnn::DataType::Float32);
2844 
2845  std::vector<float> weightsData = GenerateRandomData<float>(weightsInfo.GetNumElements());
2846  armnn::ConstTensor weights(weightsInfo, weightsData);
2847 
2848  std::vector<float> biasesData = GenerateRandomData<float>(biasesInfo.GetNumElements());
2849  armnn::ConstTensor biases(biasesInfo, biasesData);
2850 
2852  descriptor.m_PadLeft = 1;
2853  descriptor.m_PadRight = 1;
2854  descriptor.m_PadTop = 1;
2855  descriptor.m_PadBottom = 1;
2856  descriptor.m_StrideX = 1;
2857  descriptor.m_StrideY = 1;
2858  descriptor.m_BiasEnabled = true;
2860 
2862  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
2863  armnn::IConnectableLayer* const convLayer =
2864  network->AddTransposeConvolution2dLayer(descriptor,
2865  weights,
2867  layerName.c_str());
2868  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
2869 
2870  inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
2871  convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
2872 
2873  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
2874  convLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2875 
2876  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2877  BOOST_CHECK(deserializedNetwork);
2878 
2879  TransposeConvolution2dLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, descriptor, weights, biases);
2880  deserializedNetwork->Accept(verifier);
2881 }
2882 
2883 BOOST_AUTO_TEST_CASE(SerializeDeserializeNonLinearNetwork)
2884 {
2885  class ConstantLayerVerifier : public LayerVerifierBase
2886  {
2887  public:
2888  ConstantLayerVerifier(const std::string& layerName,
2889  const std::vector<armnn::TensorInfo>& inputInfos,
2890  const std::vector<armnn::TensorInfo>& outputInfos,
2891  const armnn::ConstTensor& layerInput)
2892  : LayerVerifierBase(layerName, inputInfos, outputInfos)
2893  , m_LayerInput(layerInput) {}
2894 
2895  void VisitConstantLayer(const armnn::IConnectableLayer* layer,
2896  const armnn::ConstTensor& input,
2897  const char* name) override
2898  {
2899  VerifyNameAndConnections(layer, name);
2900  CompareConstTensor(input, m_LayerInput);
2901  }
2902 
2903  void VisitAdditionLayer(const armnn::IConnectableLayer*, const char*) override {}
2904 
2905  private:
2906  armnn::ConstTensor m_LayerInput;
2907  };
2908 
2909  const std::string layerName("constant");
2911 
2912  std::vector<float> constantData = GenerateRandomData<float>(info.GetNumElements());
2913  armnn::ConstTensor constTensor(info, constantData);
2914 
2916  armnn::IConnectableLayer* input = network->AddInputLayer(0);
2917  armnn::IConnectableLayer* add = network->AddAdditionLayer();
2918  armnn::IConnectableLayer* constant = network->AddConstantLayer(constTensor, layerName.c_str());
2919  armnn::IConnectableLayer* output = network->AddOutputLayer(0);
2920 
2921  input->GetOutputSlot(0).Connect(add->GetInputSlot(0));
2922  constant->GetOutputSlot(0).Connect(add->GetInputSlot(1));
2923  add->GetOutputSlot(0).Connect(output->GetInputSlot(0));
2924 
2925  input->GetOutputSlot(0).SetTensorInfo(info);
2926  constant->GetOutputSlot(0).SetTensorInfo(info);
2927  add->GetOutputSlot(0).SetTensorInfo(info);
2928 
2929  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
2930  BOOST_CHECK(deserializedNetwork);
2931 
2932  ConstantLayerVerifier verifier(layerName, {}, {info}, constTensor);
2933  deserializedNetwork->Accept(verifier);
2934 }
2935 
2936 class VerifyLstmLayer : public LayerVerifierBaseWithDescriptor<armnn::LstmDescriptor>
2937 {
2938 public:
2939  VerifyLstmLayer(const std::string& layerName,
2940  const std::vector<armnn::TensorInfo>& inputInfos,
2941  const std::vector<armnn::TensorInfo>& outputInfos,
2942  const armnn::LstmDescriptor& descriptor,
2943  const armnn::LstmInputParams& inputParams)
2944  : LayerVerifierBaseWithDescriptor<armnn::LstmDescriptor>(layerName, inputInfos, outputInfos, descriptor)
2945  , m_InputParams(inputParams) {}
2946 
2947  void VisitLstmLayer(const armnn::IConnectableLayer* layer,
2948  const armnn::LstmDescriptor& descriptor,
2949  const armnn::LstmInputParams& params,
2950  const char* name)
2951  {
2952  VerifyNameAndConnections(layer, name);
2953  VerifyDescriptor(descriptor);
2954  VerifyInputParameters(params);
2955  }
2956 
2957 protected:
2958  void VerifyInputParameters(const armnn::LstmInputParams& params)
2959  {
2960  VerifyConstTensors(
2961  "m_InputToInputWeights", m_InputParams.m_InputToInputWeights, params.m_InputToInputWeights);
2962  VerifyConstTensors(
2963  "m_InputToForgetWeights", m_InputParams.m_InputToForgetWeights, params.m_InputToForgetWeights);
2964  VerifyConstTensors(
2965  "m_InputToCellWeights", m_InputParams.m_InputToCellWeights, params.m_InputToCellWeights);
2966  VerifyConstTensors(
2967  "m_InputToOutputWeights", m_InputParams.m_InputToOutputWeights, params.m_InputToOutputWeights);
2968  VerifyConstTensors(
2969  "m_RecurrentToInputWeights", m_InputParams.m_RecurrentToInputWeights, params.m_RecurrentToInputWeights);
2970  VerifyConstTensors(
2971  "m_RecurrentToForgetWeights", m_InputParams.m_RecurrentToForgetWeights, params.m_RecurrentToForgetWeights);
2972  VerifyConstTensors(
2973  "m_RecurrentToCellWeights", m_InputParams.m_RecurrentToCellWeights, params.m_RecurrentToCellWeights);
2974  VerifyConstTensors(
2975  "m_RecurrentToOutputWeights", m_InputParams.m_RecurrentToOutputWeights, params.m_RecurrentToOutputWeights);
2976  VerifyConstTensors(
2977  "m_CellToInputWeights", m_InputParams.m_CellToInputWeights, params.m_CellToInputWeights);
2978  VerifyConstTensors(
2979  "m_CellToForgetWeights", m_InputParams.m_CellToForgetWeights, params.m_CellToForgetWeights);
2980  VerifyConstTensors(
2981  "m_CellToOutputWeights", m_InputParams.m_CellToOutputWeights, params.m_CellToOutputWeights);
2982  VerifyConstTensors(
2983  "m_InputGateBias", m_InputParams.m_InputGateBias, params.m_InputGateBias);
2984  VerifyConstTensors(
2985  "m_ForgetGateBias", m_InputParams.m_ForgetGateBias, params.m_ForgetGateBias);
2986  VerifyConstTensors(
2987  "m_CellBias", m_InputParams.m_CellBias, params.m_CellBias);
2988  VerifyConstTensors(
2989  "m_OutputGateBias", m_InputParams.m_OutputGateBias, params.m_OutputGateBias);
2990  VerifyConstTensors(
2991  "m_ProjectionWeights", m_InputParams.m_ProjectionWeights, params.m_ProjectionWeights);
2992  VerifyConstTensors(
2993  "m_ProjectionBias", m_InputParams.m_ProjectionBias, params.m_ProjectionBias);
2994  VerifyConstTensors(
2995  "m_InputLayerNormWeights", m_InputParams.m_InputLayerNormWeights, params.m_InputLayerNormWeights);
2996  VerifyConstTensors(
2997  "m_ForgetLayerNormWeights", m_InputParams.m_ForgetLayerNormWeights, params.m_ForgetLayerNormWeights);
2998  VerifyConstTensors(
2999  "m_CellLayerNormWeights", m_InputParams.m_CellLayerNormWeights, params.m_CellLayerNormWeights);
3000  VerifyConstTensors(
3001  "m_OutputLayerNormWeights", m_InputParams.m_OutputLayerNormWeights, params.m_OutputLayerNormWeights);
3002  }
3003 
3004 private:
3005  armnn::LstmInputParams m_InputParams;
3006 };
3007 
3008 BOOST_AUTO_TEST_CASE(SerializeDeserializeLstmCifgPeepholeNoProjection)
3009 {
3010  armnn::LstmDescriptor descriptor;
3011  descriptor.m_ActivationFunc = 4;
3012  descriptor.m_ClippingThresProj = 0.0f;
3013  descriptor.m_ClippingThresCell = 0.0f;
3014  descriptor.m_CifgEnabled = true; // if this is true then we DON'T need to set the OptCifgParams
3015  descriptor.m_ProjectionEnabled = false;
3016  descriptor.m_PeepholeEnabled = true;
3017 
3018  const uint32_t batchSize = 1;
3019  const uint32_t inputSize = 2;
3020  const uint32_t numUnits = 4;
3021  const uint32_t outputSize = numUnits;
3022 
3023  armnn::TensorInfo inputWeightsInfo1({numUnits, inputSize}, armnn::DataType::Float32);
3024  std::vector<float> inputToForgetWeightsData = GenerateRandomData<float>(inputWeightsInfo1.GetNumElements());
3025  armnn::ConstTensor inputToForgetWeights(inputWeightsInfo1, inputToForgetWeightsData);
3026 
3027  std::vector<float> inputToCellWeightsData = GenerateRandomData<float>(inputWeightsInfo1.GetNumElements());
3028  armnn::ConstTensor inputToCellWeights(inputWeightsInfo1, inputToCellWeightsData);
3029 
3030  std::vector<float> inputToOutputWeightsData = GenerateRandomData<float>(inputWeightsInfo1.GetNumElements());
3031  armnn::ConstTensor inputToOutputWeights(inputWeightsInfo1, inputToOutputWeightsData);
3032 
3033  armnn::TensorInfo inputWeightsInfo2({numUnits, outputSize}, armnn::DataType::Float32);
3034  std::vector<float> recurrentToForgetWeightsData = GenerateRandomData<float>(inputWeightsInfo2.GetNumElements());
3035  armnn::ConstTensor recurrentToForgetWeights(inputWeightsInfo2, recurrentToForgetWeightsData);
3036 
3037  std::vector<float> recurrentToCellWeightsData = GenerateRandomData<float>(inputWeightsInfo2.GetNumElements());
3038  armnn::ConstTensor recurrentToCellWeights(inputWeightsInfo2, recurrentToCellWeightsData);
3039 
3040  std::vector<float> recurrentToOutputWeightsData = GenerateRandomData<float>(inputWeightsInfo2.GetNumElements());
3041  armnn::ConstTensor recurrentToOutputWeights(inputWeightsInfo2, recurrentToOutputWeightsData);
3042 
3043  armnn::TensorInfo inputWeightsInfo3({numUnits}, armnn::DataType::Float32);
3044  std::vector<float> cellToForgetWeightsData = GenerateRandomData<float>(inputWeightsInfo3.GetNumElements());
3045  armnn::ConstTensor cellToForgetWeights(inputWeightsInfo3, cellToForgetWeightsData);
3046 
3047  std::vector<float> cellToOutputWeightsData = GenerateRandomData<float>(inputWeightsInfo3.GetNumElements());
3048  armnn::ConstTensor cellToOutputWeights(inputWeightsInfo3, cellToOutputWeightsData);
3049 
3050  std::vector<float> forgetGateBiasData(numUnits, 1.0f);
3051  armnn::ConstTensor forgetGateBias(inputWeightsInfo3, forgetGateBiasData);
3052 
3053  std::vector<float> cellBiasData(numUnits, 0.0f);
3054  armnn::ConstTensor cellBias(inputWeightsInfo3, cellBiasData);
3055 
3056  std::vector<float> outputGateBiasData(numUnits, 0.0f);
3057  armnn::ConstTensor outputGateBias(inputWeightsInfo3, outputGateBiasData);
3058 
3059  armnn::LstmInputParams params;
3060  params.m_InputToForgetWeights = &inputToForgetWeights;
3061  params.m_InputToCellWeights = &inputToCellWeights;
3062  params.m_InputToOutputWeights = &inputToOutputWeights;
3063  params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
3064  params.m_RecurrentToCellWeights = &recurrentToCellWeights;
3065  params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
3066  params.m_ForgetGateBias = &forgetGateBias;
3067  params.m_CellBias = &cellBias;
3068  params.m_OutputGateBias = &outputGateBias;
3069  params.m_CellToForgetWeights = &cellToForgetWeights;
3070  params.m_CellToOutputWeights = &cellToOutputWeights;
3071 
3073  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
3074  armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(1);
3075  armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(2);
3076  const std::string layerName("lstm");
3077  armnn::IConnectableLayer* const lstmLayer = network->AddLstmLayer(descriptor, params, layerName.c_str());
3078  armnn::IConnectableLayer* const scratchBuffer = network->AddOutputLayer(0);
3079  armnn::IConnectableLayer* const outputStateOut = network->AddOutputLayer(1);
3080  armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(2);
3081  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(3);
3082 
3083  // connect up
3084  armnn::TensorInfo inputTensorInfo({ batchSize, inputSize }, armnn::DataType::Float32);
3085  armnn::TensorInfo cellStateTensorInfo({ batchSize, numUnits}, armnn::DataType::Float32);
3086  armnn::TensorInfo outputStateTensorInfo({ batchSize, outputSize }, armnn::DataType::Float32);
3087  armnn::TensorInfo lstmTensorInfoScratchBuff({ batchSize, numUnits * 3 }, armnn::DataType::Float32);
3088 
3089  inputLayer->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(0));
3090  inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
3091 
3092  outputStateIn->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(1));
3093  outputStateIn->GetOutputSlot(0).SetTensorInfo(outputStateTensorInfo);
3094 
3095  cellStateIn->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(2));
3096  cellStateIn->GetOutputSlot(0).SetTensorInfo(cellStateTensorInfo);
3097 
3098  lstmLayer->GetOutputSlot(0).Connect(scratchBuffer->GetInputSlot(0));
3099  lstmLayer->GetOutputSlot(0).SetTensorInfo(lstmTensorInfoScratchBuff);
3100 
3101  lstmLayer->GetOutputSlot(1).Connect(outputStateOut->GetInputSlot(0));
3102  lstmLayer->GetOutputSlot(1).SetTensorInfo(outputStateTensorInfo);
3103 
3104  lstmLayer->GetOutputSlot(2).Connect(cellStateOut->GetInputSlot(0));
3105  lstmLayer->GetOutputSlot(2).SetTensorInfo(cellStateTensorInfo);
3106 
3107  lstmLayer->GetOutputSlot(3).Connect(outputLayer->GetInputSlot(0));
3108  lstmLayer->GetOutputSlot(3).SetTensorInfo(outputStateTensorInfo);
3109 
3110  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
3111  BOOST_CHECK(deserializedNetwork);
3112 
3113  VerifyLstmLayer checker(
3114  layerName,
3115  {inputTensorInfo, outputStateTensorInfo, cellStateTensorInfo},
3116  {lstmTensorInfoScratchBuff, outputStateTensorInfo, cellStateTensorInfo, outputStateTensorInfo},
3117  descriptor,
3118  params);
3119  deserializedNetwork->Accept(checker);
3120 }
3121 
3122 BOOST_AUTO_TEST_CASE(SerializeDeserializeLstmNoCifgWithPeepholeAndProjection)
3123 {
3124  armnn::LstmDescriptor descriptor;
3125  descriptor.m_ActivationFunc = 4;
3126  descriptor.m_ClippingThresProj = 0.0f;
3127  descriptor.m_ClippingThresCell = 0.0f;
3128  descriptor.m_CifgEnabled = false; // if this is true then we DON'T need to set the OptCifgParams
3129  descriptor.m_ProjectionEnabled = true;
3130  descriptor.m_PeepholeEnabled = true;
3131 
3132  const uint32_t batchSize = 2;
3133  const uint32_t inputSize = 5;
3134  const uint32_t numUnits = 20;
3135  const uint32_t outputSize = 16;
3136 
3137  armnn::TensorInfo tensorInfo20x5({numUnits, inputSize}, armnn::DataType::Float32);
3138  std::vector<float> inputToInputWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
3139  armnn::ConstTensor inputToInputWeights(tensorInfo20x5, inputToInputWeightsData);
3140 
3141  std::vector<float> inputToForgetWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
3142  armnn::ConstTensor inputToForgetWeights(tensorInfo20x5, inputToForgetWeightsData);
3143 
3144  std::vector<float> inputToCellWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
3145  armnn::ConstTensor inputToCellWeights(tensorInfo20x5, inputToCellWeightsData);
3146 
3147  std::vector<float> inputToOutputWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
3148  armnn::ConstTensor inputToOutputWeights(tensorInfo20x5, inputToOutputWeightsData);
3149 
3150  armnn::TensorInfo tensorInfo20({numUnits}, armnn::DataType::Float32);
3151  std::vector<float> inputGateBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
3152  armnn::ConstTensor inputGateBias(tensorInfo20, inputGateBiasData);
3153 
3154  std::vector<float> forgetGateBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
3155  armnn::ConstTensor forgetGateBias(tensorInfo20, forgetGateBiasData);
3156 
3157  std::vector<float> cellBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
3158  armnn::ConstTensor cellBias(tensorInfo20, cellBiasData);
3159 
3160  std::vector<float> outputGateBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
3161  armnn::ConstTensor outputGateBias(tensorInfo20, outputGateBiasData);
3162 
3163  armnn::TensorInfo tensorInfo20x16({numUnits, outputSize}, armnn::DataType::Float32);
3164  std::vector<float> recurrentToInputWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
3165  armnn::ConstTensor recurrentToInputWeights(tensorInfo20x16, recurrentToInputWeightsData);
3166 
3167  std::vector<float> recurrentToForgetWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
3168  armnn::ConstTensor recurrentToForgetWeights(tensorInfo20x16, recurrentToForgetWeightsData);
3169 
3170  std::vector<float> recurrentToCellWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
3171  armnn::ConstTensor recurrentToCellWeights(tensorInfo20x16, recurrentToCellWeightsData);
3172 
3173  std::vector<float> recurrentToOutputWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
3174  armnn::ConstTensor recurrentToOutputWeights(tensorInfo20x16, recurrentToOutputWeightsData);
3175 
3176  std::vector<float> cellToInputWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
3177  armnn::ConstTensor cellToInputWeights(tensorInfo20, cellToInputWeightsData);
3178 
3179  std::vector<float> cellToForgetWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
3180  armnn::ConstTensor cellToForgetWeights(tensorInfo20, cellToForgetWeightsData);
3181 
3182  std::vector<float> cellToOutputWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
3183  armnn::ConstTensor cellToOutputWeights(tensorInfo20, cellToOutputWeightsData);
3184 
3185  armnn::TensorInfo tensorInfo16x20({outputSize, numUnits}, armnn::DataType::Float32);
3186  std::vector<float> projectionWeightsData = GenerateRandomData<float>(tensorInfo16x20.GetNumElements());
3187  armnn::ConstTensor projectionWeights(tensorInfo16x20, projectionWeightsData);
3188 
3189  armnn::TensorInfo tensorInfo16({outputSize}, armnn::DataType::Float32);
3190  std::vector<float> projectionBiasData(outputSize, 0.f);
3191  armnn::ConstTensor projectionBias(tensorInfo16, projectionBiasData);
3192 
3193  armnn::LstmInputParams params;
3194  params.m_InputToForgetWeights = &inputToForgetWeights;
3195  params.m_InputToCellWeights = &inputToCellWeights;
3196  params.m_InputToOutputWeights = &inputToOutputWeights;
3197  params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
3198  params.m_RecurrentToCellWeights = &recurrentToCellWeights;
3199  params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
3200  params.m_ForgetGateBias = &forgetGateBias;
3201  params.m_CellBias = &cellBias;
3202  params.m_OutputGateBias = &outputGateBias;
3203 
3204  // additional params because: descriptor.m_CifgEnabled = false
3205  params.m_InputToInputWeights = &inputToInputWeights;
3206  params.m_RecurrentToInputWeights = &recurrentToInputWeights;
3207  params.m_CellToInputWeights = &cellToInputWeights;
3208  params.m_InputGateBias = &inputGateBias;
3209 
3210  // additional params because: descriptor.m_ProjectionEnabled = true
3211  params.m_ProjectionWeights = &projectionWeights;
3212  params.m_ProjectionBias = &projectionBias;
3213 
3214  // additional params because: descriptor.m_PeepholeEnabled = true
3215  params.m_CellToForgetWeights = &cellToForgetWeights;
3216  params.m_CellToOutputWeights = &cellToOutputWeights;
3217 
3219  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
3220  armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(1);
3221  armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(2);
3222  const std::string layerName("lstm");
3223  armnn::IConnectableLayer* const lstmLayer = network->AddLstmLayer(descriptor, params, layerName.c_str());
3224  armnn::IConnectableLayer* const scratchBuffer = network->AddOutputLayer(0);
3225  armnn::IConnectableLayer* const outputStateOut = network->AddOutputLayer(1);
3226  armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(2);
3227  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(3);
3228 
3229  // connect up
3230  armnn::TensorInfo inputTensorInfo({ batchSize, inputSize }, armnn::DataType::Float32);
3231  armnn::TensorInfo cellStateTensorInfo({ batchSize, numUnits}, armnn::DataType::Float32);
3232  armnn::TensorInfo outputStateTensorInfo({ batchSize, outputSize }, armnn::DataType::Float32);
3233  armnn::TensorInfo lstmTensorInfoScratchBuff({ batchSize, numUnits * 4 }, armnn::DataType::Float32);
3234 
3235  inputLayer->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(0));
3236  inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
3237 
3238  outputStateIn->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(1));
3239  outputStateIn->GetOutputSlot(0).SetTensorInfo(outputStateTensorInfo);
3240 
3241  cellStateIn->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(2));
3242  cellStateIn->GetOutputSlot(0).SetTensorInfo(cellStateTensorInfo);
3243 
3244  lstmLayer->GetOutputSlot(0).Connect(scratchBuffer->GetInputSlot(0));
3245  lstmLayer->GetOutputSlot(0).SetTensorInfo(lstmTensorInfoScratchBuff);
3246 
3247  lstmLayer->GetOutputSlot(1).Connect(outputStateOut->GetInputSlot(0));
3248  lstmLayer->GetOutputSlot(1).SetTensorInfo(outputStateTensorInfo);
3249 
3250  lstmLayer->GetOutputSlot(2).Connect(cellStateOut->GetInputSlot(0));
3251  lstmLayer->GetOutputSlot(2).SetTensorInfo(cellStateTensorInfo);
3252 
3253  lstmLayer->GetOutputSlot(3).Connect(outputLayer->GetInputSlot(0));
3254  lstmLayer->GetOutputSlot(3).SetTensorInfo(outputStateTensorInfo);
3255 
3256  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
3257  BOOST_CHECK(deserializedNetwork);
3258 
3259  VerifyLstmLayer checker(
3260  layerName,
3261  {inputTensorInfo, outputStateTensorInfo, cellStateTensorInfo},
3262  {lstmTensorInfoScratchBuff, outputStateTensorInfo, cellStateTensorInfo, outputStateTensorInfo},
3263  descriptor,
3264  params);
3265  deserializedNetwork->Accept(checker);
3266 }
3267 
3268 BOOST_AUTO_TEST_CASE(SerializeDeserializeLstmNoCifgWithPeepholeWithProjectionWithLayerNorm)
3269 {
3270  armnn::LstmDescriptor descriptor;
3271  descriptor.m_ActivationFunc = 4;
3272  descriptor.m_ClippingThresProj = 0.0f;
3273  descriptor.m_ClippingThresCell = 0.0f;
3274  descriptor.m_CifgEnabled = false; // if this is true then we DON'T need to set the OptCifgParams
3275  descriptor.m_ProjectionEnabled = true;
3276  descriptor.m_PeepholeEnabled = true;
3277  descriptor.m_LayerNormEnabled = true;
3278 
3279  const uint32_t batchSize = 2;
3280  const uint32_t inputSize = 5;
3281  const uint32_t numUnits = 20;
3282  const uint32_t outputSize = 16;
3283 
3284  armnn::TensorInfo tensorInfo20x5({numUnits, inputSize}, armnn::DataType::Float32);
3285  std::vector<float> inputToInputWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
3286  armnn::ConstTensor inputToInputWeights(tensorInfo20x5, inputToInputWeightsData);
3287 
3288  std::vector<float> inputToForgetWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
3289  armnn::ConstTensor inputToForgetWeights(tensorInfo20x5, inputToForgetWeightsData);
3290 
3291  std::vector<float> inputToCellWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
3292  armnn::ConstTensor inputToCellWeights(tensorInfo20x5, inputToCellWeightsData);
3293 
3294  std::vector<float> inputToOutputWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
3295  armnn::ConstTensor inputToOutputWeights(tensorInfo20x5, inputToOutputWeightsData);
3296 
3297  armnn::TensorInfo tensorInfo20({numUnits}, armnn::DataType::Float32);
3298  std::vector<float> inputGateBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
3299  armnn::ConstTensor inputGateBias(tensorInfo20, inputGateBiasData);
3300 
3301  std::vector<float> forgetGateBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
3302  armnn::ConstTensor forgetGateBias(tensorInfo20, forgetGateBiasData);
3303 
3304  std::vector<float> cellBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
3305  armnn::ConstTensor cellBias(tensorInfo20, cellBiasData);
3306 
3307  std::vector<float> outputGateBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
3308  armnn::ConstTensor outputGateBias(tensorInfo20, outputGateBiasData);
3309 
3310  armnn::TensorInfo tensorInfo20x16({numUnits, outputSize}, armnn::DataType::Float32);
3311  std::vector<float> recurrentToInputWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
3312  armnn::ConstTensor recurrentToInputWeights(tensorInfo20x16, recurrentToInputWeightsData);
3313 
3314  std::vector<float> recurrentToForgetWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
3315  armnn::ConstTensor recurrentToForgetWeights(tensorInfo20x16, recurrentToForgetWeightsData);
3316 
3317  std::vector<float> recurrentToCellWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
3318  armnn::ConstTensor recurrentToCellWeights(tensorInfo20x16, recurrentToCellWeightsData);
3319 
3320  std::vector<float> recurrentToOutputWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
3321  armnn::ConstTensor recurrentToOutputWeights(tensorInfo20x16, recurrentToOutputWeightsData);
3322 
3323  std::vector<float> cellToInputWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
3324  armnn::ConstTensor cellToInputWeights(tensorInfo20, cellToInputWeightsData);
3325 
3326  std::vector<float> cellToForgetWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
3327  armnn::ConstTensor cellToForgetWeights(tensorInfo20, cellToForgetWeightsData);
3328 
3329  std::vector<float> cellToOutputWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
3330  armnn::ConstTensor cellToOutputWeights(tensorInfo20, cellToOutputWeightsData);
3331 
3332  armnn::TensorInfo tensorInfo16x20({outputSize, numUnits}, armnn::DataType::Float32);
3333  std::vector<float> projectionWeightsData = GenerateRandomData<float>(tensorInfo16x20.GetNumElements());
3334  armnn::ConstTensor projectionWeights(tensorInfo16x20, projectionWeightsData);
3335 
3336  armnn::TensorInfo tensorInfo16({outputSize}, armnn::DataType::Float32);
3337  std::vector<float> projectionBiasData(outputSize, 0.f);
3338  armnn::ConstTensor projectionBias(tensorInfo16, projectionBiasData);
3339 
3340  std::vector<float> inputLayerNormWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
3341  armnn::ConstTensor inputLayerNormWeights(tensorInfo20, forgetGateBiasData);
3342 
3343  std::vector<float> forgetLayerNormWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
3344  armnn::ConstTensor forgetLayerNormWeights(tensorInfo20, forgetGateBiasData);
3345 
3346  std::vector<float> cellLayerNormWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
3347  armnn::ConstTensor cellLayerNormWeights(tensorInfo20, forgetGateBiasData);
3348 
3349  std::vector<float> outLayerNormWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
3350  armnn::ConstTensor outLayerNormWeights(tensorInfo20, forgetGateBiasData);
3351 
3352  armnn::LstmInputParams params;
3353  params.m_InputToForgetWeights = &inputToForgetWeights;
3354  params.m_InputToCellWeights = &inputToCellWeights;
3355  params.m_InputToOutputWeights = &inputToOutputWeights;
3356  params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
3357  params.m_RecurrentToCellWeights = &recurrentToCellWeights;
3358  params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
3359  params.m_ForgetGateBias = &forgetGateBias;
3360  params.m_CellBias = &cellBias;
3361  params.m_OutputGateBias = &outputGateBias;
3362 
3363  // additional params because: descriptor.m_CifgEnabled = false
3364  params.m_InputToInputWeights = &inputToInputWeights;
3365  params.m_RecurrentToInputWeights = &recurrentToInputWeights;
3366  params.m_CellToInputWeights = &cellToInputWeights;
3367  params.m_InputGateBias = &inputGateBias;
3368 
3369  // additional params because: descriptor.m_ProjectionEnabled = true
3370  params.m_ProjectionWeights = &projectionWeights;
3371  params.m_ProjectionBias = &projectionBias;
3372 
3373  // additional params because: descriptor.m_PeepholeEnabled = true
3374  params.m_CellToForgetWeights = &cellToForgetWeights;
3375  params.m_CellToOutputWeights = &cellToOutputWeights;
3376 
3377  // additional params because: despriptor.m_LayerNormEnabled = true
3378  params.m_InputLayerNormWeights = &inputLayerNormWeights;
3379  params.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
3380  params.m_CellLayerNormWeights = &cellLayerNormWeights;
3381  params.m_OutputLayerNormWeights = &outLayerNormWeights;
3382 
3384  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
3385  armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(1);
3386  armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(2);
3387  const std::string layerName("lstm");
3388  armnn::IConnectableLayer* const lstmLayer = network->AddLstmLayer(descriptor, params, layerName.c_str());
3389  armnn::IConnectableLayer* const scratchBuffer = network->AddOutputLayer(0);
3390  armnn::IConnectableLayer* const outputStateOut = network->AddOutputLayer(1);
3391  armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(2);
3392  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(3);
3393 
3394  // connect up
3395  armnn::TensorInfo inputTensorInfo({ batchSize, inputSize }, armnn::DataType::Float32);
3396  armnn::TensorInfo cellStateTensorInfo({ batchSize, numUnits}, armnn::DataType::Float32);
3397  armnn::TensorInfo outputStateTensorInfo({ batchSize, outputSize }, armnn::DataType::Float32);
3398  armnn::TensorInfo lstmTensorInfoScratchBuff({ batchSize, numUnits * 4 }, armnn::DataType::Float32);
3399 
3400  inputLayer->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(0));
3401  inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
3402 
3403  outputStateIn->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(1));
3404  outputStateIn->GetOutputSlot(0).SetTensorInfo(outputStateTensorInfo);
3405 
3406  cellStateIn->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(2));
3407  cellStateIn->GetOutputSlot(0).SetTensorInfo(cellStateTensorInfo);
3408 
3409  lstmLayer->GetOutputSlot(0).Connect(scratchBuffer->GetInputSlot(0));
3410  lstmLayer->GetOutputSlot(0).SetTensorInfo(lstmTensorInfoScratchBuff);
3411 
3412  lstmLayer->GetOutputSlot(1).Connect(outputStateOut->GetInputSlot(0));
3413  lstmLayer->GetOutputSlot(1).SetTensorInfo(outputStateTensorInfo);
3414 
3415  lstmLayer->GetOutputSlot(2).Connect(cellStateOut->GetInputSlot(0));
3416  lstmLayer->GetOutputSlot(2).SetTensorInfo(cellStateTensorInfo);
3417 
3418  lstmLayer->GetOutputSlot(3).Connect(outputLayer->GetInputSlot(0));
3419  lstmLayer->GetOutputSlot(3).SetTensorInfo(outputStateTensorInfo);
3420 
3421  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
3422  BOOST_CHECK(deserializedNetwork);
3423 
3424  VerifyLstmLayer checker(
3425  layerName,
3426  {inputTensorInfo, outputStateTensorInfo, cellStateTensorInfo},
3427  {lstmTensorInfoScratchBuff, outputStateTensorInfo, cellStateTensorInfo, outputStateTensorInfo},
3428  descriptor,
3429  params);
3430  deserializedNetwork->Accept(checker);
3431 }
3432 
3433 BOOST_AUTO_TEST_CASE(EnsureLstmLayersBackwardCompatibility)
3434 {
3435  // The hex data below is a flat buffer containing a lstm layer with no Cifg, with peephole and projection
3436  // enabled. That data was obtained before additional layer normalization parameters where added to the
3437  // lstm serializer. That way it can be tested if a lstm model with the old parameter configuration can
3438  // still be loaded
3439  const std::vector<uint8_t> lstmNoCifgWithPeepholeAndProjectionModel =
3440  {
3441  0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00,
3442  0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x2C, 0x00, 0x00, 0x00, 0x38, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00,
3443  0xDC, 0x29, 0x00, 0x00, 0x38, 0x29, 0x00, 0x00, 0xB4, 0x28, 0x00, 0x00, 0x94, 0x01, 0x00, 0x00, 0x3C, 0x01,
3444  0x00, 0x00, 0xE0, 0x00, 0x00, 0x00, 0x84, 0x00, 0x00, 0x00, 0x28, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
3445  0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x04, 0x00,
3446  0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00, 0x70, 0xD6, 0xFF, 0xFF,
3447  0x00, 0x00, 0x00, 0x0B, 0x04, 0x00, 0x00, 0x00, 0x06, 0xD7, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x88, 0xD7,
3448  0xFF, 0xFF, 0x08, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0xF6, 0xD6, 0xFF, 0xFF, 0x07, 0x00, 0x00, 0x00,
3449  0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00,
3450  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3451  0xE8, 0xD7, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0xC8, 0xD6, 0xFF, 0xFF, 0x00, 0x00,
3452  0x00, 0x0B, 0x04, 0x00, 0x00, 0x00, 0x5E, 0xD7, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0xE0, 0xD7, 0xFF, 0xFF,
3453  0x08, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x4E, 0xD7, 0xFF, 0xFF, 0x06, 0x00, 0x00, 0x00, 0x10, 0x00,
3454  0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3455  0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0xD8,
3456  0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x20, 0xD7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B,
3457  0x04, 0x00, 0x00, 0x00, 0xB6, 0xD7, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x38, 0xD8, 0xFF, 0xFF, 0x08, 0x00,
3458  0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0xA6, 0xD7, 0xFF, 0xFF, 0x05, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,
3459  0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3460  0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x98, 0xD8, 0xFF, 0xFF,
3461  0x03, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x78, 0xD7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B, 0x04, 0x00,
3462  0x00, 0x00, 0x0E, 0xD8, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x16, 0xD8, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00,
3463  0xFA, 0xD7, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00,
3464  0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
3465  0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xEC, 0xD8, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00,
3466  0x00, 0x00, 0x6C, 0xD8, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x23, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00,
3467  0x12, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00, 0x00, 0x00, 0xE0, 0x25, 0x00, 0x00, 0xD0, 0x25,
3468  0x00, 0x00, 0x2C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x26, 0x00, 0x48, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00,
3469  0x10, 0x00, 0x14, 0x00, 0x18, 0x00, 0x1C, 0x00, 0x20, 0x00, 0x24, 0x00, 0x28, 0x00, 0x2C, 0x00, 0x30, 0x00,
3470  0x34, 0x00, 0x38, 0x00, 0x3C, 0x00, 0x40, 0x00, 0x44, 0x00, 0x26, 0x00, 0x00, 0x00, 0xC4, 0x23, 0x00, 0x00,
3471  0xF8, 0x21, 0x00, 0x00, 0x2C, 0x20, 0x00, 0x00, 0xF0, 0x1A, 0x00, 0x00, 0xB4, 0x15, 0x00, 0x00, 0x78, 0x10,
3472  0x00, 0x00, 0xF0, 0x0F, 0x00, 0x00, 0x68, 0x0F, 0x00, 0x00, 0xE0, 0x0E, 0x00, 0x00, 0x14, 0x0D, 0x00, 0x00,
3473  0xD8, 0x07, 0x00, 0x00, 0x50, 0x07, 0x00, 0x00, 0xC8, 0x06, 0x00, 0x00, 0x8C, 0x01, 0x00, 0x00, 0x14, 0x01,
3474  0x00, 0x00, 0x8C, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xEE, 0xD7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03,
3475  0x64, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xFE, 0xD8, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x14, 0x00,
3476  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3477  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3478  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3479  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3480  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x5A, 0xD8, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01,
3481  0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x72, 0xD8,
3482  0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x64, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x82, 0xD9, 0xFF, 0xFF,
3483  0x04, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3484  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3485  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3486  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3487  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xDE, 0xD8,
3488  0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
3489  0x14, 0x00, 0x00, 0x00, 0xF6, 0xD8, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x54, 0x00, 0x00, 0x00, 0x04, 0x00,
3490  0x00, 0x00, 0x06, 0xDA, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3491  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3492  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3493  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3494  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x52, 0xD9, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00,
3495  0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x6A, 0xD9, 0xFF, 0xFF, 0x00, 0x00,
3496  0x00, 0x03, 0x14, 0x05, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x7A, 0xDA, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00,
3497  0x40, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3498  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3499  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3500  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3501  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3502  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3503  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3504  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3505  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3506  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3507  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3508  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3509  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3510  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3511  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3512  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3513  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3514  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3515  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3516  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3517  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3518  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3519  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3520  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3521  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3522  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3523  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3524  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3525  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3526  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3527  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3528  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3529  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3530  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3531  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3532  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3533  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3534  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3535  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3536  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3537  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3538  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3539  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3540  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3541  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3542  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3543  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3544  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3545  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3546  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3547  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3548  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3549  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3550  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3551  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3552  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3553  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3554  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3555  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3556  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3557  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3558  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3559  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3560  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3561  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3562  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3563  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3564  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3565  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3566  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3567  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3568  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x86, 0xDE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00,
3569  0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0xA2, 0xDE,
3570  0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x64, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xB2, 0xDF, 0xFF, 0xFF,
3571  0x04, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3572  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3573  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3574  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3575  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0xDF,
3576  0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
3577  0x14, 0x00, 0x00, 0x00, 0x26, 0xDF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x64, 0x00, 0x00, 0x00, 0x04, 0x00,
3578  0x00, 0x00, 0x36, 0xE0, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3579  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3580  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3581  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3582  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3583  0x00, 0x00, 0x00, 0x00, 0x92, 0xDF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
3584  0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0xAA, 0xDF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03,
3585  0x14, 0x05, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xBA, 0xE0, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x40, 0x01,
3586  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3587  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3588  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3589  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3590  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3591  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3592  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3593  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3594  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3595  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3596  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3597  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3598  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3599  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3600  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3601  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3602  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3603  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3604  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3605  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3606  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3607  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3608  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3609  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3610  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3611  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3612  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3613  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3614  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3615  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3616  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3617  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3618  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3619  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3620  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3621  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3622  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3623  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3624  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3625  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3626  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3627  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3628  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3629  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3630  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3631  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3632  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3633  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3634  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3635  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3636  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3637  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3638  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3639  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3640  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3641  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3642  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3643  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3644  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3645  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3646  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3647  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3648  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3649  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3650  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3651  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3652  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3653  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3654  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3655  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3656  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3657  0x00, 0x00, 0x00, 0x00, 0xC6, 0xE4, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
3658  0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0xE2, 0xE4, 0xFF, 0xFF,
3659  0x00, 0x00, 0x00, 0x03, 0xA4, 0x01, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xF2, 0xE5, 0xFF, 0xFF, 0x04, 0x00,
3660  0x00, 0x00, 0x64, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3661  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3662  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3663  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3664  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3665  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3666  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3667  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3668  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3669  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3670  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3671  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3672  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3673  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3674  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3675  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3676  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3677  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3678  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3679  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3680  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3681  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3682  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x8E, 0xE6, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01,
3683  0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x05, 0x00,
3684  0x00, 0x00, 0xAA, 0xE6, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x64, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
3685  0xBA, 0xE7, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3686  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3687  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3688  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3689  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3690  0x00, 0x00, 0x16, 0xE7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3691  0x01, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x2E, 0xE7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x64, 0x00,
3692  0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x3E, 0xE8, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00,
3693  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3694  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3695  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3696  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3697  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x9A, 0xE7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00,
3698  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0xB2, 0xE7, 0xFF, 0xFF,
3699  0x00, 0x00, 0x00, 0x03, 0x64, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xC2, 0xE8, 0xFF, 0xFF, 0x04, 0x00,
3700  0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3701  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3702  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3703  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3704  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x1E, 0xE8, 0xFF, 0xFF,
3705  0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x14, 0x00,
3706  0x00, 0x00, 0x36, 0xE8, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x14, 0x05, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
3707  0x46, 0xE9, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x40, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3708  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3709  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3710  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3711  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3712  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3713  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3714  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3715  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3716  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3717  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3718  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3719  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3720  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3721  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3722  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3723  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3724  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3725  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3726  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3727  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3728  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3729  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3730  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3731  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3732  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3733  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3734  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3735  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3736  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3737  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3738  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3739  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3740  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3741  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3742  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3743  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3744  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3745  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3746  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3747  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3748  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3749  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3750  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3751  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3752  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3753  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3754  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3755  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3756  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3757  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3758  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3759  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3760  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3761  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3762  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3763  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3764  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3765  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3766  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3767  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3768  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3769  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3770  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3771  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3772  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3773  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3774  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3775  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3776  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3777  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3778  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x52, 0xED, 0xFF, 0xFF,
3779  0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00,
3780  0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x6E, 0xED, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x14, 0x05, 0x00, 0x00,
3781  0x04, 0x00, 0x00, 0x00, 0x7E, 0xEE, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x40, 0x01, 0x00, 0x00, 0x00, 0x00,
3782  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3783  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3784  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3785  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3786  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3787  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3788  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3789  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3790  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3791  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3792  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3793  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3794  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3795  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3796  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3797  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3798  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3799  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3800  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3801  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3802  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3803  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3804  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3805  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3806  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3807  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3808  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3809  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3810  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3811  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3812  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3813  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3814  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3815  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3816  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3817  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3818  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3819  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3820  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3821  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3822  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3823  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3824  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3825  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3826  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3827  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3828  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3829  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3830  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3831  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3832  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3833  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3834  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3835  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3836  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3837  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3838  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3839  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3840  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3841  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3842  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3843  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3844  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3845  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3846  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3847  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3848  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3849  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3850  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3851  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3852  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3853  0x8A, 0xF2, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00,
3854  0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0xA6, 0xF2, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03,
3855  0x14, 0x05, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xB6, 0xF3, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x40, 0x01,
3856  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3857  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3858  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3859  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3860  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3861  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3862  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3863  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3864  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3865  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3866  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3867  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3868  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3869  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3870  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3871  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3872  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3873  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3874  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3875  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3876  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3877  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3878  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3879  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3880  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3881  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3882  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3883  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3884  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3885  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3886  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3887  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3888  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3889  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3890  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3891  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3892  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3893  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3894  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3895  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3896  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3897  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3898  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3899  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3900  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3901  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3902  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3903  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3904  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3905  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3906  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3907  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3908  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3909  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3910  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3911  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3912  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3913  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3914  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3915  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3916  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3917  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3918  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3919  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3920  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3921  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3922  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3923  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3924  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3925  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3926  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3927  0x00, 0x00, 0x00, 0x00, 0xC2, 0xF7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
3928  0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0xDE, 0xF7, 0xFF, 0xFF,
3929  0x00, 0x00, 0x00, 0x03, 0xA4, 0x01, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xEE, 0xF8, 0xFF, 0xFF, 0x04, 0x00,
3930  0x00, 0x00, 0x64, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3931  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3932  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3933  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3934  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3935  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3936  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3937  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3938  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3939  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3940  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3941  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3942  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3943  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3944  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3945  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3946  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3947  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3948  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3949  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3950  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3951  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3952  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x8A, 0xF9, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01,
3953  0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x05, 0x00,
3954  0x00, 0x00, 0xA6, 0xF9, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0xA4, 0x01, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
3955  0xB6, 0xFA, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x64, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3956  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3957  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3958  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3959  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3960  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3961  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3962  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3963  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3964  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3965  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3966  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3967  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3968  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3969  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3970  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3971  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3972  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3973  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3974  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3975  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3976  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3977  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x52, 0xFB,
3978  0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
3979  0x14, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x6E, 0xFB, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0xA4, 0x01,
3980  0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x7E, 0xFC, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x64, 0x00, 0x00, 0x00,
3981  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3982  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3983  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3984  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3985  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3986  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3987  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3988  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3989  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3990  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3991  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3992  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3993  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3994  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3995  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3996  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3997  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3998  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3999  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
4000  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
4001  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
4002  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
4003  0x00, 0x00, 0x00, 0x00, 0x1A, 0xFD, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
4004  0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x10, 0x00, 0x0C, 0x00,
4005  0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x05, 0x00, 0x06, 0x00, 0x07, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00,
4006  0x01, 0x01, 0x04, 0x00, 0x00, 0x00, 0x2E, 0xFE, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,
4007  0x22, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x6C, 0x73,
4008  0x74, 0x6D, 0x00, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0xEC, 0x00, 0x00, 0x00, 0xD0, 0x00, 0x00, 0x00,
4009  0xB4, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x88, 0x00, 0x00, 0x00, 0x5C, 0x00, 0x00, 0x00, 0x30, 0x00,
4010  0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x14, 0xFF, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
4011  0xA6, 0xFD, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00,
4012  0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x3C, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00,
4013  0x04, 0x00, 0x00, 0x00, 0xCE, 0xFD, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
4014  0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x64, 0xFF, 0xFF, 0xFF,
4015  0x01, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xF6, 0xFD, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00,
4016  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,
4017  0xB4, 0xFE, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x1A, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00,
4018  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x50, 0x00, 0x00, 0x00,
4019  0xF0, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00,
4020  0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
4021  0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
4022  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xE8, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x09, 0x04, 0x00, 0x00, 0x00,
4023  0x7E, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00,
4024  0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x76, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00,
4025  0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00,
4026  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
4027  0x68, 0xFF, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0xCE, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00,
4028  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,
4029  0x08, 0x00, 0x0E, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x0C, 0x00,
4030  0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x08, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00,
4031  0x08, 0x00, 0x0E, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x01, 0x00,
4032  0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x14, 0x00,
4033  0x0E, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00,
4034  0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
4035  0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00,
4036  0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x6E, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00,
4037  0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x08, 0x00,
4038  0x0C, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x04, 0x00, 0x00, 0x00,
4039  0xF6, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x0A, 0x00, 0x04, 0x00, 0x06, 0x00,
4040  0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x14, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00,
4041  0x0C, 0x00, 0x10, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00,
4042  0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
4043  0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00,
4044  0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x08, 0x00, 0x07, 0x00, 0x0C, 0x00,
4045  0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00,
4046  0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x00
4047  };
4048 
4049  armnn::INetworkPtr deserializedNetwork =
4050  DeserializeNetwork(std::string(lstmNoCifgWithPeepholeAndProjectionModel.begin(),
4051  lstmNoCifgWithPeepholeAndProjectionModel.end()));
4052 
4053  BOOST_CHECK(deserializedNetwork);
4054 
4055  // generating the same model parameters which where used to serialize the model (Layer norm is not specified)
4056  armnn::LstmDescriptor descriptor;
4057  descriptor.m_ActivationFunc = 4;
4058  descriptor.m_ClippingThresProj = 0.0f;
4059  descriptor.m_ClippingThresCell = 0.0f;
4060  descriptor.m_CifgEnabled = false;
4061  descriptor.m_ProjectionEnabled = true;
4062  descriptor.m_PeepholeEnabled = true;
4063 
4064  const uint32_t batchSize = 2u;
4065  const uint32_t inputSize = 5u;
4066  const uint32_t numUnits = 20u;
4067  const uint32_t outputSize = 16u;
4068 
4069  armnn::TensorInfo tensorInfo20x5({numUnits, inputSize}, armnn::DataType::Float32);
4070  std::vector<float> inputToInputWeightsData(tensorInfo20x5.GetNumElements(), 0.0f);
4071  armnn::ConstTensor inputToInputWeights(tensorInfo20x5, inputToInputWeightsData);
4072 
4073  std::vector<float> inputToForgetWeightsData(tensorInfo20x5.GetNumElements(), 0.0f);
4074  armnn::ConstTensor inputToForgetWeights(tensorInfo20x5, inputToForgetWeightsData);
4075 
4076  std::vector<float> inputToCellWeightsData(tensorInfo20x5.GetNumElements(), 0.0f);
4077  armnn::ConstTensor inputToCellWeights(tensorInfo20x5, inputToCellWeightsData);
4078 
4079  std::vector<float> inputToOutputWeightsData(tensorInfo20x5.GetNumElements(), 0.0f);
4080  armnn::ConstTensor inputToOutputWeights(tensorInfo20x5, inputToOutputWeightsData);
4081 
4082  armnn::TensorInfo tensorInfo20({numUnits}, armnn::DataType::Float32);
4083  std::vector<float> inputGateBiasData(tensorInfo20.GetNumElements(), 0.0f);
4084  armnn::ConstTensor inputGateBias(tensorInfo20, inputGateBiasData);
4085 
4086  std::vector<float> forgetGateBiasData(tensorInfo20.GetNumElements(), 0.0f);
4087  armnn::ConstTensor forgetGateBias(tensorInfo20, forgetGateBiasData);
4088 
4089  std::vector<float> cellBiasData(tensorInfo20.GetNumElements(), 0.0f);
4090  armnn::ConstTensor cellBias(tensorInfo20, cellBiasData);
4091 
4092  std::vector<float> outputGateBiasData(tensorInfo20.GetNumElements(), 0.0f);
4093  armnn::ConstTensor outputGateBias(tensorInfo20, outputGateBiasData);
4094 
4095  armnn::TensorInfo tensorInfo20x16({numUnits, outputSize}, armnn::DataType::Float32);
4096  std::vector<float> recurrentToInputWeightsData(tensorInfo20x16.GetNumElements(), 0.0f);
4097  armnn::ConstTensor recurrentToInputWeights(tensorInfo20x16, recurrentToInputWeightsData);
4098 
4099  std::vector<float> recurrentToForgetWeightsData(tensorInfo20x16.GetNumElements(), 0.0f);
4100  armnn::ConstTensor recurrentToForgetWeights(tensorInfo20x16, recurrentToForgetWeightsData);
4101 
4102  std::vector<float> recurrentToCellWeightsData(tensorInfo20x16.GetNumElements(), 0.0f);
4103  armnn::ConstTensor recurrentToCellWeights(tensorInfo20x16, recurrentToCellWeightsData);
4104 
4105  std::vector<float> recurrentToOutputWeightsData(tensorInfo20x16.GetNumElements(), 0.0f);
4106  armnn::ConstTensor recurrentToOutputWeights(tensorInfo20x16, recurrentToOutputWeightsData);
4107 
4108  std::vector<float> cellToInputWeightsData(tensorInfo20.GetNumElements(), 0.0f);
4109  armnn::ConstTensor cellToInputWeights(tensorInfo20, cellToInputWeightsData);
4110 
4111  std::vector<float> cellToForgetWeightsData(tensorInfo20.GetNumElements(), 0.0f);
4112  armnn::ConstTensor cellToForgetWeights(tensorInfo20, cellToForgetWeightsData);
4113 
4114  std::vector<float> cellToOutputWeightsData(tensorInfo20.GetNumElements(), 0.0f);
4115  armnn::ConstTensor cellToOutputWeights(tensorInfo20, cellToOutputWeightsData);
4116 
4117  armnn::TensorInfo tensorInfo16x20({outputSize, numUnits}, armnn::DataType::Float32);
4118  std::vector<float> projectionWeightsData(tensorInfo16x20.GetNumElements(), 0.0f);
4119  armnn::ConstTensor projectionWeights(tensorInfo16x20, projectionWeightsData);
4120 
4121  armnn::TensorInfo tensorInfo16({outputSize}, armnn::DataType::Float32);
4122  std::vector<float> projectionBiasData(outputSize, 0.0f);
4123  armnn::ConstTensor projectionBias(tensorInfo16, projectionBiasData);
4124 
4125  armnn::LstmInputParams params;
4126  params.m_InputToForgetWeights = &inputToForgetWeights;
4127  params.m_InputToCellWeights = &inputToCellWeights;
4128  params.m_InputToOutputWeights = &inputToOutputWeights;
4129  params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
4130  params.m_RecurrentToCellWeights = &recurrentToCellWeights;
4131  params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
4132  params.m_ForgetGateBias = &forgetGateBias;
4133  params.m_CellBias = &cellBias;
4134  params.m_OutputGateBias = &outputGateBias;
4135 
4136  // additional params because: descriptor.m_CifgEnabled = false
4137  params.m_InputToInputWeights = &inputToInputWeights;
4138  params.m_RecurrentToInputWeights = &recurrentToInputWeights;
4139  params.m_CellToInputWeights = &cellToInputWeights;
4140  params.m_InputGateBias = &inputGateBias;
4141 
4142  // additional params because: descriptor.m_ProjectionEnabled = true
4143  params.m_ProjectionWeights = &projectionWeights;
4144  params.m_ProjectionBias = &projectionBias;
4145 
4146  // additional params because: descriptor.m_PeepholeEnabled = true
4147  params.m_CellToForgetWeights = &cellToForgetWeights;
4148  params.m_CellToOutputWeights = &cellToOutputWeights;
4149 
4150  const std::string layerName("lstm");
4151  armnn::TensorInfo inputTensorInfo({ batchSize, inputSize }, armnn::DataType::Float32);
4152  armnn::TensorInfo cellStateTensorInfo({ batchSize, numUnits}, armnn::DataType::Float32);
4153  armnn::TensorInfo outputStateTensorInfo({ batchSize, outputSize }, armnn::DataType::Float32);
4154  armnn::TensorInfo lstmTensorInfoScratchBuff({ batchSize, numUnits * 4 }, armnn::DataType::Float32);
4155 
4156  VerifyLstmLayer checker(
4157  layerName,
4158  {inputTensorInfo, outputStateTensorInfo, cellStateTensorInfo},
4159  {lstmTensorInfoScratchBuff, outputStateTensorInfo, cellStateTensorInfo, outputStateTensorInfo},
4160  descriptor,
4161  params);
4162  deserializedNetwork->Accept(checker);
4163 }
4164 class VerifyQuantizedLstmLayer : public LayerVerifierBase
4165 {
4166 
4167 public:
4168  VerifyQuantizedLstmLayer(const std::string& layerName,
4169  const std::vector<armnn::TensorInfo>& inputInfos,
4170  const std::vector<armnn::TensorInfo>& outputInfos,
4171  const armnn::QuantizedLstmInputParams& inputParams)
4172  : LayerVerifierBase(layerName, inputInfos, outputInfos), m_InputParams(inputParams) {}
4173 
4174  void VisitQuantizedLstmLayer(const armnn::IConnectableLayer* layer,
4175  const armnn::QuantizedLstmInputParams& params,
4176  const char* name)
4177  {
4178  VerifyNameAndConnections(layer, name);
4179  VerifyInputParameters(params);
4180  }
4181 
4182 protected:
4183  void VerifyInputParameters(const armnn::QuantizedLstmInputParams& params)
4184  {
4185  VerifyConstTensors("m_InputToInputWeights",
4186  m_InputParams.m_InputToInputWeights, params.m_InputToInputWeights);
4187  VerifyConstTensors("m_InputToForgetWeights",
4188  m_InputParams.m_InputToForgetWeights, params.m_InputToForgetWeights);
4189  VerifyConstTensors("m_InputToCellWeights",
4190  m_InputParams.m_InputToCellWeights, params.m_InputToCellWeights);
4191  VerifyConstTensors("m_InputToOutputWeights",
4192  m_InputParams.m_InputToOutputWeights, params.m_InputToOutputWeights);
4193  VerifyConstTensors("m_RecurrentToInputWeights",
4194  m_InputParams.m_RecurrentToInputWeights, params.m_RecurrentToInputWeights);
4195  VerifyConstTensors("m_RecurrentToForgetWeights",
4196  m_InputParams.m_RecurrentToForgetWeights, params.m_RecurrentToForgetWeights);
4197  VerifyConstTensors("m_RecurrentToCellWeights",
4198  m_InputParams.m_RecurrentToCellWeights, params.m_RecurrentToCellWeights);
4199  VerifyConstTensors("m_RecurrentToOutputWeights",
4200  m_InputParams.m_RecurrentToOutputWeights, params.m_RecurrentToOutputWeights);
4201  VerifyConstTensors("m_InputGateBias",
4202  m_InputParams.m_InputGateBias, params.m_InputGateBias);
4203  VerifyConstTensors("m_ForgetGateBias",
4204  m_InputParams.m_ForgetGateBias, params.m_ForgetGateBias);
4205  VerifyConstTensors("m_CellBias",
4206  m_InputParams.m_CellBias, params.m_CellBias);
4207  VerifyConstTensors("m_OutputGateBias",
4208  m_InputParams.m_OutputGateBias, params.m_OutputGateBias);
4209  }
4210 
4211 private:
4212  armnn::QuantizedLstmInputParams m_InputParams;
4213 };
4214 
4215 BOOST_AUTO_TEST_CASE(SerializeDeserializeQuantizedLstm)
4216 {
4217  const uint32_t batchSize = 1;
4218  const uint32_t inputSize = 2;
4219  const uint32_t numUnits = 4;
4220  const uint32_t outputSize = numUnits;
4221 
4222  // Scale/Offset for input/output, cellState In/Out, weights, bias
4223  float inputOutputScale = 0.0078125f;
4224  int32_t inputOutputOffset = 128;
4225 
4226  float cellStateScale = 0.00048828125f;
4227  int32_t cellStateOffset = 0;
4228 
4229  float weightsScale = 0.00408021f;
4230  int32_t weightsOffset = 100;
4231 
4232  float biasScale = 3.1876640625e-05f;
4233  int32_t biasOffset = 0;
4234 
4235  // The shape of weight data is {outputSize, inputSize} = {4, 2}
4236  armnn::TensorShape inputToInputWeightsShape = {4, 2};
4237  std::vector<uint8_t> inputToInputWeightsData = {1, 2, 3, 4, 5, 6, 7, 8};
4238  armnn::TensorInfo inputToInputWeightsInfo(inputToInputWeightsShape,
4240  weightsScale,
4241  weightsOffset);
4242  armnn::ConstTensor inputToInputWeights(inputToInputWeightsInfo, inputToInputWeightsData);
4243 
4244  armnn::TensorShape inputToForgetWeightsShape = {4, 2};
4245  std::vector<uint8_t> inputToForgetWeightsData = {1, 2, 3, 4, 5, 6, 7, 8};
4246  armnn::TensorInfo inputToForgetWeightsInfo(inputToForgetWeightsShape,
4248  weightsScale,
4249  weightsOffset);
4250  armnn::ConstTensor inputToForgetWeights(inputToForgetWeightsInfo, inputToForgetWeightsData);
4251 
4252  armnn::TensorShape inputToCellWeightsShape = {4, 2};
4253  std::vector<uint8_t> inputToCellWeightsData = {1, 2, 3, 4, 5, 6, 7, 8};
4254  armnn::TensorInfo inputToCellWeightsInfo(inputToCellWeightsShape,
4256  weightsScale,
4257  weightsOffset);
4258  armnn::ConstTensor inputToCellWeights(inputToCellWeightsInfo, inputToCellWeightsData);
4259 
4260  armnn::TensorShape inputToOutputWeightsShape = {4, 2};
4261  std::vector<uint8_t> inputToOutputWeightsData = {1, 2, 3, 4, 5, 6, 7, 8};
4262  armnn::TensorInfo inputToOutputWeightsInfo(inputToOutputWeightsShape,
4264  weightsScale,
4265  weightsOffset);
4266  armnn::ConstTensor inputToOutputWeights(inputToOutputWeightsInfo, inputToOutputWeightsData);
4267 
4268  // The shape of recurrent weight data is {outputSize, outputSize} = {4, 4}
4269  armnn::TensorShape recurrentToInputWeightsShape = {4, 4};
4270  std::vector<uint8_t> recurrentToInputWeightsData = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};
4271  armnn::TensorInfo recurrentToInputWeightsInfo(recurrentToInputWeightsShape,
4273  weightsScale,
4274  weightsOffset);
4275  armnn::ConstTensor recurrentToInputWeights(recurrentToInputWeightsInfo, recurrentToInputWeightsData);
4276 
4277  armnn::TensorShape recurrentToForgetWeightsShape = {4, 4};
4278  std::vector<uint8_t> recurrentToForgetWeightsData = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};
4279  armnn::TensorInfo recurrentToForgetWeightsInfo(recurrentToForgetWeightsShape,
4281  weightsScale,
4282  weightsOffset);
4283  armnn::ConstTensor recurrentToForgetWeights(recurrentToForgetWeightsInfo, recurrentToForgetWeightsData);
4284 
4285  armnn::TensorShape recurrentToCellWeightsShape = {4, 4};
4286  std::vector<uint8_t> recurrentToCellWeightsData = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};
4287  armnn::TensorInfo recurrentToCellWeightsInfo(recurrentToCellWeightsShape,
4289  weightsScale,
4290  weightsOffset);
4291  armnn::ConstTensor recurrentToCellWeights(recurrentToCellWeightsInfo, recurrentToCellWeightsData);
4292 
4293  armnn::TensorShape recurrentToOutputWeightsShape = {4, 4};
4294  std::vector<uint8_t> recurrentToOutputWeightsData = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};
4295  armnn::TensorInfo recurrentToOutputWeightsInfo(recurrentToOutputWeightsShape,
4297  weightsScale,
4298  weightsOffset);
4299  armnn::ConstTensor recurrentToOutputWeights(recurrentToOutputWeightsInfo, recurrentToOutputWeightsData);
4300 
4301  // The shape of bias data is {outputSize} = {4}
4302  armnn::TensorShape inputGateBiasShape = {4};
4303  std::vector<int32_t> inputGateBiasData = {1, 2, 3, 4};
4304  armnn::TensorInfo inputGateBiasInfo(inputGateBiasShape,
4306  biasScale,
4307  biasOffset);
4308  armnn::ConstTensor inputGateBias(inputGateBiasInfo, inputGateBiasData);
4309 
4310  armnn::TensorShape forgetGateBiasShape = {4};
4311  std::vector<int32_t> forgetGateBiasData = {1, 2, 3, 4};
4312  armnn::TensorInfo forgetGateBiasInfo(forgetGateBiasShape,
4314  biasScale,
4315  biasOffset);
4316  armnn::ConstTensor forgetGateBias(forgetGateBiasInfo, forgetGateBiasData);
4317 
4318  armnn::TensorShape cellBiasShape = {4};
4319  std::vector<int32_t> cellBiasData = {1, 2, 3, 4};
4320  armnn::TensorInfo cellBiasInfo(cellBiasShape,
4322  biasScale,
4323  biasOffset);
4324  armnn::ConstTensor cellBias(cellBiasInfo, cellBiasData);
4325 
4326  armnn::TensorShape outputGateBiasShape = {4};
4327  std::vector<int32_t> outputGateBiasData = {1, 2, 3, 4};
4328  armnn::TensorInfo outputGateBiasInfo(outputGateBiasShape,
4330  biasScale,
4331  biasOffset);
4332  armnn::ConstTensor outputGateBias(outputGateBiasInfo, outputGateBiasData);
4333 
4335  params.m_InputToInputWeights = &inputToInputWeights;
4336  params.m_InputToForgetWeights = &inputToForgetWeights;
4337  params.m_InputToCellWeights = &inputToCellWeights;
4338  params.m_InputToOutputWeights = &inputToOutputWeights;
4339  params.m_RecurrentToInputWeights = &recurrentToInputWeights;
4340  params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
4341  params.m_RecurrentToCellWeights = &recurrentToCellWeights;
4342  params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
4343  params.m_InputGateBias = &inputGateBias;
4344  params.m_ForgetGateBias = &forgetGateBias;
4345  params.m_CellBias = &cellBias;
4346  params.m_OutputGateBias = &outputGateBias;
4347 
4349  armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
4350  armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(1);
4351  armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(2);
4352  const std::string layerName("QuantizedLstm");
4353  armnn::IConnectableLayer* const quantizedLstmLayer = network->AddQuantizedLstmLayer(params, layerName.c_str());
4354  armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(0);
4355  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(1);
4356 
4357  // Connect up
4358  armnn::TensorInfo inputTensorInfo({ batchSize, inputSize },
4360  inputOutputScale,
4361  inputOutputOffset);
4362  armnn::TensorInfo cellStateTensorInfo({ batchSize, numUnits },
4364  cellStateScale,
4365  cellStateOffset);
4366  armnn::TensorInfo outputStateTensorInfo({ batchSize, outputSize },
4368  inputOutputScale,
4369  inputOutputOffset);
4370 
4371  inputLayer->GetOutputSlot(0).Connect(quantizedLstmLayer->GetInputSlot(0));
4372  inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
4373 
4374  cellStateIn->GetOutputSlot(0).Connect(quantizedLstmLayer->GetInputSlot(1));
4375  cellStateIn->GetOutputSlot(0).SetTensorInfo(cellStateTensorInfo);
4376 
4377  outputStateIn->GetOutputSlot(0).Connect(quantizedLstmLayer->GetInputSlot(2));
4378  outputStateIn->GetOutputSlot(0).SetTensorInfo(outputStateTensorInfo);
4379 
4380  quantizedLstmLayer->GetOutputSlot(0).Connect(cellStateOut->GetInputSlot(0));
4381  quantizedLstmLayer->GetOutputSlot(0).SetTensorInfo(cellStateTensorInfo);
4382 
4383  quantizedLstmLayer->GetOutputSlot(1).Connect(outputLayer->GetInputSlot(0));
4384  quantizedLstmLayer->GetOutputSlot(1).SetTensorInfo(outputStateTensorInfo);
4385 
4386  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
4387  BOOST_CHECK(deserializedNetwork);
4388 
4389  VerifyQuantizedLstmLayer checker(layerName,
4390  {inputTensorInfo, cellStateTensorInfo, outputStateTensorInfo},
4391  {cellStateTensorInfo, outputStateTensorInfo},
4392  params);
4393 
4394  deserializedNetwork->Accept(checker);
4395 }
4396 
4397 class VerifyQLstmLayer : public LayerVerifierBaseWithDescriptor<armnn::QLstmDescriptor>
4398 {
4399 public:
4400  VerifyQLstmLayer(const std::string& layerName,
4401  const std::vector<armnn::TensorInfo>& inputInfos,
4402  const std::vector<armnn::TensorInfo>& outputInfos,
4403  const armnn::QLstmDescriptor& descriptor,
4404  const armnn::LstmInputParams& inputParams)
4405  : LayerVerifierBaseWithDescriptor<armnn::QLstmDescriptor>(layerName, inputInfos, outputInfos, descriptor)
4406  , m_InputParams(inputParams) {}
4407 
4408  void VisitQLstmLayer(const armnn::IConnectableLayer* layer,
4409  const armnn::QLstmDescriptor& descriptor,
4410  const armnn::LstmInputParams& params,
4411  const char* name)
4412  {
4413  VerifyNameAndConnections(layer, name);
4414  VerifyDescriptor(descriptor);
4415  VerifyInputParameters(params);
4416  }
4417 
4418 protected:
4419  void VerifyInputParameters(const armnn::LstmInputParams& params)
4420  {
4421  VerifyConstTensors(
4422  "m_InputToInputWeights", m_InputParams.m_InputToInputWeights, params.m_InputToInputWeights);
4423  VerifyConstTensors(
4424  "m_InputToForgetWeights", m_InputParams.m_InputToForgetWeights, params.m_InputToForgetWeights);
4425  VerifyConstTensors(
4426  "m_InputToCellWeights", m_InputParams.m_InputToCellWeights, params.m_InputToCellWeights);
4427  VerifyConstTensors(
4428  "m_InputToOutputWeights", m_InputParams.m_InputToOutputWeights, params.m_InputToOutputWeights);
4429  VerifyConstTensors(
4430  "m_RecurrentToInputWeights", m_InputParams.m_RecurrentToInputWeights, params.m_RecurrentToInputWeights);
4431  VerifyConstTensors(
4432  "m_RecurrentToForgetWeights", m_InputParams.m_RecurrentToForgetWeights, params.m_RecurrentToForgetWeights);
4433  VerifyConstTensors(
4434  "m_RecurrentToCellWeights", m_InputParams.m_RecurrentToCellWeights, params.m_RecurrentToCellWeights);
4435  VerifyConstTensors(
4436  "m_RecurrentToOutputWeights", m_InputParams.m_RecurrentToOutputWeights, params.m_RecurrentToOutputWeights);
4437  VerifyConstTensors(
4438  "m_CellToInputWeights", m_InputParams.m_CellToInputWeights, params.m_CellToInputWeights);
4439  VerifyConstTensors(
4440  "m_CellToForgetWeights", m_InputParams.m_CellToForgetWeights, params.m_CellToForgetWeights);
4441  VerifyConstTensors(
4442  "m_CellToOutputWeights", m_InputParams.m_CellToOutputWeights, params.m_CellToOutputWeights);
4443  VerifyConstTensors(
4444  "m_InputGateBias", m_InputParams.m_InputGateBias, params.m_InputGateBias);
4445  VerifyConstTensors(
4446  "m_ForgetGateBias", m_InputParams.m_ForgetGateBias, params.m_ForgetGateBias);
4447  VerifyConstTensors(
4448  "m_CellBias", m_InputParams.m_CellBias, params.m_CellBias);
4449  VerifyConstTensors(
4450  "m_OutputGateBias", m_InputParams.m_OutputGateBias, params.m_OutputGateBias);
4451  VerifyConstTensors(
4452  "m_ProjectionWeights", m_InputParams.m_ProjectionWeights, params.m_ProjectionWeights);
4453  VerifyConstTensors(
4454  "m_ProjectionBias", m_InputParams.m_ProjectionBias, params.m_ProjectionBias);
4455  VerifyConstTensors(
4456  "m_InputLayerNormWeights", m_InputParams.m_InputLayerNormWeights, params.m_InputLayerNormWeights);
4457  VerifyConstTensors(
4458  "m_ForgetLayerNormWeights", m_InputParams.m_ForgetLayerNormWeights, params.m_ForgetLayerNormWeights);
4459  VerifyConstTensors(
4460  "m_CellLayerNormWeights", m_InputParams.m_CellLayerNormWeights, params.m_CellLayerNormWeights);
4461  VerifyConstTensors(
4462  "m_OutputLayerNormWeights", m_InputParams.m_OutputLayerNormWeights, params.m_OutputLayerNormWeights);
4463  }
4464 
4465 private:
4466  armnn::LstmInputParams m_InputParams;
4467 };
4468 
4469 BOOST_AUTO_TEST_CASE(SerializeDeserializeQLstmBasic)
4470 {
4471  armnn::QLstmDescriptor descriptor;
4472 
4473  descriptor.m_CifgEnabled = true;
4474  descriptor.m_ProjectionEnabled = false;
4475  descriptor.m_PeepholeEnabled = false;
4476  descriptor.m_LayerNormEnabled = false;
4477 
4478  descriptor.m_CellClip = 0.0f;
4479  descriptor.m_ProjectionClip = 0.0f;
4480 
4481  descriptor.m_InputIntermediateScale = 0.00001f;
4482  descriptor.m_ForgetIntermediateScale = 0.00001f;
4483  descriptor.m_CellIntermediateScale = 0.00001f;
4484  descriptor.m_OutputIntermediateScale = 0.00001f;
4485 
4486  descriptor.m_HiddenStateScale = 0.07f;
4487  descriptor.m_HiddenStateZeroPoint = 0;
4488 
4489  const unsigned int numBatches = 2;
4490  const unsigned int inputSize = 5;
4491  const unsigned int outputSize = 4;
4492  const unsigned int numUnits = 4;
4493 
4494  // Scale/Offset quantization info
4495  float inputScale = 0.0078f;
4496  int32_t inputOffset = 0;
4497 
4498  float outputScale = 0.0078f;
4499  int32_t outputOffset = 0;
4500 
4501  float cellStateScale = 3.5002e-05f;
4502  int32_t cellStateOffset = 0;
4503 
4504  float weightsScale = 0.007f;
4505  int32_t weightsOffset = 0;
4506 
4507  float biasScale = 3.5002e-05f / 1024;
4508  int32_t biasOffset = 0;
4509 
4510  // Weights and bias tensor and quantization info
4511  armnn::TensorInfo inputWeightsInfo({numUnits, inputSize},
4513  weightsScale,
4514  weightsOffset);
4515 
4516  armnn::TensorInfo recurrentWeightsInfo({numUnits, outputSize},
4518  weightsScale,
4519  weightsOffset);
4520 
4521  armnn::TensorInfo biasInfo({numUnits}, armnn::DataType::Signed32, biasScale, biasOffset);
4522 
4523  std::vector<int8_t> inputToForgetWeightsData = GenerateRandomData<int8_t>(inputWeightsInfo.GetNumElements());
4524  std::vector<int8_t> inputToCellWeightsData = GenerateRandomData<int8_t>(inputWeightsInfo.GetNumElements());
4525  std::vector<int8_t> inputToOutputWeightsData = GenerateRandomData<int8_t>(inputWeightsInfo.GetNumElements());
4526 
4527  armnn::ConstTensor inputToForgetWeights(inputWeightsInfo, inputToForgetWeightsData);
4528  armnn::ConstTensor inputToCellWeights(inputWeightsInfo, inputToCellWeightsData);
4529  armnn::ConstTensor inputToOutputWeights(inputWeightsInfo, inputToOutputWeightsData);
4530 
4531  std::vector<int8_t> recurrentToForgetWeightsData =
4532  GenerateRandomData<int8_t>(recurrentWeightsInfo.GetNumElements());
4533  std::vector<int8_t> recurrentToCellWeightsData =
4534  GenerateRandomData<int8_t>(recurrentWeightsInfo.GetNumElements());
4535  std::vector<int8_t> recurrentToOutputWeightsData =
4536  GenerateRandomData<int8_t>(recurrentWeightsInfo.GetNumElements());
4537 
4538  armnn::ConstTensor recurrentToForgetWeights(recurrentWeightsInfo, recurrentToForgetWeightsData);
4539  armnn::ConstTensor recurrentToCellWeights(recurrentWeightsInfo, recurrentToCellWeightsData);
4540  armnn::ConstTensor recurrentToOutputWeights(recurrentWeightsInfo, recurrentToOutputWeightsData);
4541 
4542  std::vector<int32_t> forgetGateBiasData(numUnits, 1);
4543  std::vector<int32_t> cellBiasData(numUnits, 0);
4544  std::vector<int32_t> outputGateBiasData(numUnits, 0);
4545 
4546  armnn::ConstTensor forgetGateBias(biasInfo, forgetGateBiasData);
4547  armnn::ConstTensor cellBias(biasInfo, cellBiasData);
4548  armnn::ConstTensor outputGateBias(biasInfo, outputGateBiasData);
4549 
4550  // Set up params
4551  armnn::LstmInputParams params;
4552  params.m_InputToForgetWeights = &inputToForgetWeights;
4553  params.m_InputToCellWeights = &inputToCellWeights;
4554  params.m_InputToOutputWeights = &inputToOutputWeights;
4555 
4556  params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
4557  params.m_RecurrentToCellWeights = &recurrentToCellWeights;
4558  params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
4559 
4560  params.m_ForgetGateBias = &forgetGateBias;
4561  params.m_CellBias = &cellBias;
4562  params.m_OutputGateBias = &outputGateBias;
4563 
4564  // Create network
4566  const std::string layerName("qLstm");
4567 
4568  armnn::IConnectableLayer* const input = network->AddInputLayer(0);
4569  armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(1);
4570  armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(2);
4571 
4572  armnn::IConnectableLayer* const qLstmLayer = network->AddQLstmLayer(descriptor, params, layerName.c_str());
4573 
4574  armnn::IConnectableLayer* const outputStateOut = network->AddOutputLayer(0);
4575  armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(1);
4576  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(2);
4577 
4578  // Input/Output tensor info
4579  armnn::TensorInfo inputInfo({numBatches , inputSize},
4581  inputScale,
4582  inputOffset);
4583 
4584  armnn::TensorInfo cellStateInfo({numBatches , numUnits},
4586  cellStateScale,
4587  cellStateOffset);
4588 
4589  armnn::TensorInfo outputStateInfo({numBatches , outputSize},
4591  outputScale,
4592  outputOffset);
4593 
4594  // Connect input/output slots
4595  input->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(0));
4596  input->GetOutputSlot(0).SetTensorInfo(inputInfo);
4597 
4598  outputStateIn->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(1));
4599  outputStateIn->GetOutputSlot(0).SetTensorInfo(cellStateInfo);
4600 
4601  cellStateIn->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(2));
4602  cellStateIn->GetOutputSlot(0).SetTensorInfo(outputStateInfo);
4603 
4604  qLstmLayer->GetOutputSlot(0).Connect(outputStateOut->GetInputSlot(0));
4605  qLstmLayer->GetOutputSlot(0).SetTensorInfo(outputStateInfo);
4606 
4607  qLstmLayer->GetOutputSlot(1).Connect(cellStateOut->GetInputSlot(0));
4608  qLstmLayer->GetOutputSlot(1).SetTensorInfo(cellStateInfo);
4609 
4610  qLstmLayer->GetOutputSlot(2).Connect(outputLayer->GetInputSlot(0));
4611  qLstmLayer->GetOutputSlot(2).SetTensorInfo(outputStateInfo);
4612 
4613  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
4614  BOOST_CHECK(deserializedNetwork);
4615 
4616  VerifyQLstmLayer checker(layerName,
4617  {inputInfo, cellStateInfo, outputStateInfo},
4618  {outputStateInfo, cellStateInfo, outputStateInfo},
4619  descriptor,
4620  params);
4621 
4622  deserializedNetwork->Accept(checker);
4623 }
4624 
4625 BOOST_AUTO_TEST_CASE(SerializeDeserializeQLstmCifgLayerNorm)
4626 {
4627  armnn::QLstmDescriptor descriptor;
4628 
4629  // CIFG params are used when CIFG is disabled
4630  descriptor.m_CifgEnabled = true;
4631  descriptor.m_ProjectionEnabled = false;
4632  descriptor.m_PeepholeEnabled = false;
4633  descriptor.m_LayerNormEnabled = true;
4634 
4635  descriptor.m_CellClip = 0.0f;
4636  descriptor.m_ProjectionClip = 0.0f;
4637 
4638  descriptor.m_InputIntermediateScale = 0.00001f;
4639  descriptor.m_ForgetIntermediateScale = 0.00001f;
4640  descriptor.m_CellIntermediateScale = 0.00001f;
4641  descriptor.m_OutputIntermediateScale = 0.00001f;
4642 
4643  descriptor.m_HiddenStateScale = 0.07f;
4644  descriptor.m_HiddenStateZeroPoint = 0;
4645 
4646  const unsigned int numBatches = 2;
4647  const unsigned int inputSize = 5;
4648  const unsigned int outputSize = 4;
4649  const unsigned int numUnits = 4;
4650 
4651  // Scale/Offset quantization info
4652  float inputScale = 0.0078f;
4653  int32_t inputOffset = 0;
4654 
4655  float outputScale = 0.0078f;
4656  int32_t outputOffset = 0;
4657 
4658  float cellStateScale = 3.5002e-05f;
4659  int32_t cellStateOffset = 0;
4660 
4661  float weightsScale = 0.007f;
4662  int32_t weightsOffset = 0;
4663 
4664  float layerNormScale = 3.5002e-05f;
4665  int32_t layerNormOffset = 0;
4666 
4667  float biasScale = layerNormScale / 1024;
4668  int32_t biasOffset = 0;
4669 
4670  // Weights and bias tensor and quantization info
4671  armnn::TensorInfo inputWeightsInfo({numUnits, inputSize},
4673  weightsScale,
4674  weightsOffset);
4675 
4676  armnn::TensorInfo recurrentWeightsInfo({numUnits, outputSize},
4678  weightsScale,
4679  weightsOffset);
4680 
4681  armnn::TensorInfo biasInfo({numUnits},
4683  biasScale,
4684  biasOffset);
4685 
4686  armnn::TensorInfo layerNormWeightsInfo({numUnits},
4688  layerNormScale,
4689  layerNormOffset);
4690 
4691  // Mandatory params
4692  std::vector<int8_t> inputToForgetWeightsData = GenerateRandomData<int8_t>(inputWeightsInfo.GetNumElements());
4693  std::vector<int8_t> inputToCellWeightsData = GenerateRandomData<int8_t>(inputWeightsInfo.GetNumElements());
4694  std::vector<int8_t> inputToOutputWeightsData = GenerateRandomData<int8_t>(inputWeightsInfo.GetNumElements());
4695 
4696  armnn::ConstTensor inputToForgetWeights(inputWeightsInfo, inputToForgetWeightsData);
4697  armnn::ConstTensor inputToCellWeights(inputWeightsInfo, inputToCellWeightsData);
4698  armnn::ConstTensor inputToOutputWeights(inputWeightsInfo, inputToOutputWeightsData);
4699 
4700  std::vector<int8_t> recurrentToForgetWeightsData =
4701  GenerateRandomData<int8_t>(recurrentWeightsInfo.GetNumElements());
4702  std::vector<int8_t> recurrentToCellWeightsData =
4703  GenerateRandomData<int8_t>(recurrentWeightsInfo.GetNumElements());
4704  std::vector<int8_t> recurrentToOutputWeightsData =
4705  GenerateRandomData<int8_t>(recurrentWeightsInfo.GetNumElements());
4706 
4707  armnn::ConstTensor recurrentToForgetWeights(recurrentWeightsInfo, recurrentToForgetWeightsData);
4708  armnn::ConstTensor recurrentToCellWeights(recurrentWeightsInfo, recurrentToCellWeightsData);
4709  armnn::ConstTensor recurrentToOutputWeights(recurrentWeightsInfo, recurrentToOutputWeightsData);
4710 
4711  std::vector<int32_t> forgetGateBiasData(numUnits, 1);
4712  std::vector<int32_t> cellBiasData(numUnits, 0);
4713  std::vector<int32_t> outputGateBiasData(numUnits, 0);
4714 
4715  armnn::ConstTensor forgetGateBias(biasInfo, forgetGateBiasData);
4716  armnn::ConstTensor cellBias(biasInfo, cellBiasData);
4717  armnn::ConstTensor outputGateBias(biasInfo, outputGateBiasData);
4718 
4719  // Layer Norm
4720  std::vector<int16_t> forgetLayerNormWeightsData =
4721  GenerateRandomData<int16_t>(layerNormWeightsInfo.GetNumElements());
4722  std::vector<int16_t> cellLayerNormWeightsData =
4723  GenerateRandomData<int16_t>(layerNormWeightsInfo.GetNumElements());
4724  std::vector<int16_t> outputLayerNormWeightsData =
4725  GenerateRandomData<int16_t>(layerNormWeightsInfo.GetNumElements());
4726 
4727  armnn::ConstTensor forgetLayerNormWeights(layerNormWeightsInfo, forgetLayerNormWeightsData);
4728  armnn::ConstTensor cellLayerNormWeights(layerNormWeightsInfo, cellLayerNormWeightsData);
4729  armnn::ConstTensor outputLayerNormWeights(layerNormWeightsInfo, outputLayerNormWeightsData);
4730 
4731  // Set up params
4732  armnn::LstmInputParams params;
4733 
4734  // Mandatory params
4735  params.m_InputToForgetWeights = &inputToForgetWeights;
4736  params.m_InputToCellWeights = &inputToCellWeights;
4737  params.m_InputToOutputWeights = &inputToOutputWeights;
4738 
4739  params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
4740  params.m_RecurrentToCellWeights = &recurrentToCellWeights;
4741  params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
4742 
4743  params.m_ForgetGateBias = &forgetGateBias;
4744  params.m_CellBias = &cellBias;
4745  params.m_OutputGateBias = &outputGateBias;
4746 
4747  // Layer Norm
4748  params.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
4749  params.m_CellLayerNormWeights = &cellLayerNormWeights;
4750  params.m_OutputLayerNormWeights = &outputLayerNormWeights;
4751 
4752  // Create network
4754  const std::string layerName("qLstm");
4755 
4756  armnn::IConnectableLayer* const input = network->AddInputLayer(0);
4757  armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(1);
4758  armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(2);
4759 
4760  armnn::IConnectableLayer* const qLstmLayer = network->AddQLstmLayer(descriptor, params, layerName.c_str());
4761 
4762  armnn::IConnectableLayer* const outputStateOut = network->AddOutputLayer(0);
4763  armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(1);
4764  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(2);
4765 
4766  // Input/Output tensor info
4767  armnn::TensorInfo inputInfo({numBatches , inputSize},
4769  inputScale,
4770  inputOffset);
4771 
4772  armnn::TensorInfo cellStateInfo({numBatches , numUnits},
4774  cellStateScale,
4775  cellStateOffset);
4776 
4777  armnn::TensorInfo outputStateInfo({numBatches , outputSize},
4779  outputScale,
4780  outputOffset);
4781 
4782  // Connect input/output slots
4783  input->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(0));
4784  input->GetOutputSlot(0).SetTensorInfo(inputInfo);
4785 
4786  outputStateIn->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(1));
4787  outputStateIn->GetOutputSlot(0).SetTensorInfo(cellStateInfo);
4788 
4789  cellStateIn->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(2));
4790  cellStateIn->GetOutputSlot(0).SetTensorInfo(outputStateInfo);
4791 
4792  qLstmLayer->GetOutputSlot(0).Connect(outputStateOut->GetInputSlot(0));
4793  qLstmLayer->GetOutputSlot(0).SetTensorInfo(outputStateInfo);
4794 
4795  qLstmLayer->GetOutputSlot(1).Connect(cellStateOut->GetInputSlot(0));
4796  qLstmLayer->GetOutputSlot(1).SetTensorInfo(cellStateInfo);
4797 
4798  qLstmLayer->GetOutputSlot(2).Connect(outputLayer->GetInputSlot(0));
4799  qLstmLayer->GetOutputSlot(2).SetTensorInfo(outputStateInfo);
4800 
4801  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
4802  BOOST_CHECK(deserializedNetwork);
4803 
4804  VerifyQLstmLayer checker(layerName,
4805  {inputInfo, cellStateInfo, outputStateInfo},
4806  {outputStateInfo, cellStateInfo, outputStateInfo},
4807  descriptor,
4808  params);
4809 
4810  deserializedNetwork->Accept(checker);
4811 }
4812 
4813 BOOST_AUTO_TEST_CASE(SerializeDeserializeQLstmAdvanced)
4814 {
4815  armnn::QLstmDescriptor descriptor;
4816 
4817  descriptor.m_CifgEnabled = false;
4818  descriptor.m_ProjectionEnabled = true;
4819  descriptor.m_PeepholeEnabled = true;
4820  descriptor.m_LayerNormEnabled = true;
4821 
4822  descriptor.m_CellClip = 0.1f;
4823  descriptor.m_ProjectionClip = 0.1f;
4824 
4825  descriptor.m_InputIntermediateScale = 0.00001f;
4826  descriptor.m_ForgetIntermediateScale = 0.00001f;
4827  descriptor.m_CellIntermediateScale = 0.00001f;
4828  descriptor.m_OutputIntermediateScale = 0.00001f;
4829 
4830  descriptor.m_HiddenStateScale = 0.07f;
4831  descriptor.m_HiddenStateZeroPoint = 0;
4832 
4833  const unsigned int numBatches = 2;
4834  const unsigned int inputSize = 5;
4835  const unsigned int outputSize = 4;
4836  const unsigned int numUnits = 4;
4837 
4838  // Scale/Offset quantization info
4839  float inputScale = 0.0078f;
4840  int32_t inputOffset = 0;
4841 
4842  float outputScale = 0.0078f;
4843  int32_t outputOffset = 0;
4844 
4845  float cellStateScale = 3.5002e-05f;
4846  int32_t cellStateOffset = 0;
4847 
4848  float weightsScale = 0.007f;
4849  int32_t weightsOffset = 0;
4850 
4851  float layerNormScale = 3.5002e-05f;
4852  int32_t layerNormOffset = 0;
4853 
4854  float biasScale = layerNormScale / 1024;
4855  int32_t biasOffset = 0;
4856 
4857  // Weights and bias tensor and quantization info
4858  armnn::TensorInfo inputWeightsInfo({numUnits, inputSize},
4860  weightsScale,
4861  weightsOffset);
4862 
4863  armnn::TensorInfo recurrentWeightsInfo({numUnits, outputSize},
4865  weightsScale,
4866  weightsOffset);
4867 
4868  armnn::TensorInfo biasInfo({numUnits},
4870  biasScale,
4871  biasOffset);
4872 
4873  armnn::TensorInfo peepholeWeightsInfo({numUnits},
4875  weightsScale,
4876  weightsOffset);
4877 
4878  armnn::TensorInfo layerNormWeightsInfo({numUnits},
4880  layerNormScale,
4881  layerNormOffset);
4882 
4883  armnn::TensorInfo projectionWeightsInfo({outputSize, numUnits},
4885  weightsScale,
4886  weightsOffset);
4887 
4888  // Mandatory params
4889  std::vector<int8_t> inputToForgetWeightsData = GenerateRandomData<int8_t>(inputWeightsInfo.GetNumElements());
4890  std::vector<int8_t> inputToCellWeightsData = GenerateRandomData<int8_t>(inputWeightsInfo.GetNumElements());
4891  std::vector<int8_t> inputToOutputWeightsData = GenerateRandomData<int8_t>(inputWeightsInfo.GetNumElements());
4892 
4893  armnn::ConstTensor inputToForgetWeights(inputWeightsInfo, inputToForgetWeightsData);
4894  armnn::ConstTensor inputToCellWeights(inputWeightsInfo, inputToCellWeightsData);
4895  armnn::ConstTensor inputToOutputWeights(inputWeightsInfo, inputToOutputWeightsData);
4896 
4897  std::vector<int8_t> recurrentToForgetWeightsData =
4898  GenerateRandomData<int8_t>(recurrentWeightsInfo.GetNumElements());
4899  std::vector<int8_t> recurrentToCellWeightsData =
4900  GenerateRandomData<int8_t>(recurrentWeightsInfo.GetNumElements());
4901  std::vector<int8_t> recurrentToOutputWeightsData =
4902  GenerateRandomData<int8_t>(recurrentWeightsInfo.GetNumElements());
4903 
4904  armnn::ConstTensor recurrentToForgetWeights(recurrentWeightsInfo, recurrentToForgetWeightsData);
4905  armnn::ConstTensor recurrentToCellWeights(recurrentWeightsInfo, recurrentToCellWeightsData);
4906  armnn::ConstTensor recurrentToOutputWeights(recurrentWeightsInfo, recurrentToOutputWeightsData);
4907 
4908  std::vector<int32_t> forgetGateBiasData(numUnits, 1);
4909  std::vector<int32_t> cellBiasData(numUnits, 0);
4910  std::vector<int32_t> outputGateBiasData(numUnits, 0);
4911 
4912  armnn::ConstTensor forgetGateBias(biasInfo, forgetGateBiasData);
4913  armnn::ConstTensor cellBias(biasInfo, cellBiasData);
4914  armnn::ConstTensor outputGateBias(biasInfo, outputGateBiasData);
4915 
4916  // CIFG
4917  std::vector<int8_t> inputToInputWeightsData = GenerateRandomData<int8_t>(inputWeightsInfo.GetNumElements());
4918  std::vector<int8_t> recurrentToInputWeightsData =
4919  GenerateRandomData<int8_t>(recurrentWeightsInfo.GetNumElements());
4920  std::vector<int32_t> inputGateBiasData(numUnits, 1);
4921 
4922  armnn::ConstTensor inputToInputWeights(inputWeightsInfo, inputToInputWeightsData);
4923  armnn::ConstTensor recurrentToInputWeights(recurrentWeightsInfo, recurrentToInputWeightsData);
4924  armnn::ConstTensor inputGateBias(biasInfo, inputGateBiasData);
4925 
4926  // Peephole
4927  std::vector<int16_t> cellToInputWeightsData = GenerateRandomData<int16_t>(peepholeWeightsInfo.GetNumElements());
4928  std::vector<int16_t> cellToForgetWeightsData = GenerateRandomData<int16_t>(peepholeWeightsInfo.GetNumElements());
4929  std::vector<int16_t> cellToOutputWeightsData = GenerateRandomData<int16_t>(peepholeWeightsInfo.GetNumElements());
4930 
4931  armnn::ConstTensor cellToInputWeights(peepholeWeightsInfo, cellToInputWeightsData);
4932  armnn::ConstTensor cellToForgetWeights(peepholeWeightsInfo, cellToForgetWeightsData);
4933  armnn::ConstTensor cellToOutputWeights(peepholeWeightsInfo, cellToOutputWeightsData);
4934 
4935  // Projection
4936  std::vector<int8_t> projectionWeightsData = GenerateRandomData<int8_t>(projectionWeightsInfo.GetNumElements());
4937  std::vector<int32_t> projectionBiasData(outputSize, 1);
4938 
4939  armnn::ConstTensor projectionWeights(projectionWeightsInfo, projectionWeightsData);
4940  armnn::ConstTensor projectionBias(biasInfo, projectionBiasData);
4941 
4942  // Layer Norm
4943  std::vector<int16_t> inputLayerNormWeightsData =
4944  GenerateRandomData<int16_t>(layerNormWeightsInfo.GetNumElements());
4945  std::vector<int16_t> forgetLayerNormWeightsData =
4946  GenerateRandomData<int16_t>(layerNormWeightsInfo.GetNumElements());
4947  std::vector<int16_t> cellLayerNormWeightsData =
4948  GenerateRandomData<int16_t>(layerNormWeightsInfo.GetNumElements());
4949  std::vector<int16_t> outputLayerNormWeightsData =
4950  GenerateRandomData<int16_t>(layerNormWeightsInfo.GetNumElements());
4951 
4952  armnn::ConstTensor inputLayerNormWeights(layerNormWeightsInfo, inputLayerNormWeightsData);
4953  armnn::ConstTensor forgetLayerNormWeights(layerNormWeightsInfo, forgetLayerNormWeightsData);
4954  armnn::ConstTensor cellLayerNormWeights(layerNormWeightsInfo, cellLayerNormWeightsData);
4955  armnn::ConstTensor outputLayerNormWeights(layerNormWeightsInfo, outputLayerNormWeightsData);
4956 
4957  // Set up params
4958  armnn::LstmInputParams params;
4959 
4960  // Mandatory params
4961  params.m_InputToForgetWeights = &inputToForgetWeights;
4962  params.m_InputToCellWeights = &inputToCellWeights;
4963  params.m_InputToOutputWeights = &inputToOutputWeights;
4964 
4965  params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
4966  params.m_RecurrentToCellWeights = &recurrentToCellWeights;
4967  params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
4968 
4969  params.m_ForgetGateBias = &forgetGateBias;
4970  params.m_CellBias = &cellBias;
4971  params.m_OutputGateBias = &outputGateBias;
4972 
4973  // CIFG
4974  params.m_InputToInputWeights = &inputToInputWeights;
4975  params.m_RecurrentToInputWeights = &recurrentToInputWeights;
4976  params.m_InputGateBias = &inputGateBias;
4977 
4978  // Peephole
4979  params.m_CellToInputWeights = &cellToInputWeights;
4980  params.m_CellToForgetWeights = &cellToForgetWeights;
4981  params.m_CellToOutputWeights = &cellToOutputWeights;
4982 
4983  // Projection
4984  params.m_ProjectionWeights = &projectionWeights;
4985  params.m_ProjectionBias = &projectionBias;
4986 
4987  // Layer Norm
4988  params.m_InputLayerNormWeights = &inputLayerNormWeights;
4989  params.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
4990  params.m_CellLayerNormWeights = &cellLayerNormWeights;
4991  params.m_OutputLayerNormWeights = &outputLayerNormWeights;
4992 
4993  // Create network
4995  const std::string layerName("qLstm");
4996 
4997  armnn::IConnectableLayer* const input = network->AddInputLayer(0);
4998  armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(1);
4999  armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(2);
5000 
5001  armnn::IConnectableLayer* const qLstmLayer = network->AddQLstmLayer(descriptor, params, layerName.c_str());
5002 
5003  armnn::IConnectableLayer* const outputStateOut = network->AddOutputLayer(0);
5004  armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(1);
5005  armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(2);
5006 
5007  // Input/Output tensor info
5008  armnn::TensorInfo inputInfo({numBatches , inputSize},
5010  inputScale,
5011  inputOffset);
5012 
5013  armnn::TensorInfo cellStateInfo({numBatches , numUnits},
5015  cellStateScale,
5016  cellStateOffset);
5017 
5018  armnn::TensorInfo outputStateInfo({numBatches , outputSize},
5020  outputScale,
5021  outputOffset);
5022 
5023  // Connect input/output slots
5024  input->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(0));
5025  input->GetOutputSlot(0).SetTensorInfo(inputInfo);
5026 
5027  outputStateIn->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(1));
5028  outputStateIn->GetOutputSlot(0).SetTensorInfo(cellStateInfo);
5029 
5030  cellStateIn->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(2));
5031  cellStateIn->GetOutputSlot(0).SetTensorInfo(outputStateInfo);
5032 
5033  qLstmLayer->GetOutputSlot(0).Connect(outputStateOut->GetInputSlot(0));
5034  qLstmLayer->GetOutputSlot(0).SetTensorInfo(outputStateInfo);
5035 
5036  qLstmLayer->GetOutputSlot(1).Connect(cellStateOut->GetInputSlot(0));
5037  qLstmLayer->GetOutputSlot(1).SetTensorInfo(cellStateInfo);
5038 
5039  qLstmLayer->GetOutputSlot(2).Connect(outputLayer->GetInputSlot(0));
5040  qLstmLayer->GetOutputSlot(2).SetTensorInfo(outputStateInfo);
5041 
5042  armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
5043  BOOST_CHECK(deserializedNetwork);
5044 
5045  VerifyQLstmLayer checker(layerName,
5046  {inputInfo, cellStateInfo, outputStateInfo},
5047  {outputStateInfo, cellStateInfo, outputStateInfo},
5048  descriptor,
5049  params);
5050 
5051  deserializedNetwork->Accept(checker);
5052 }
5053 
BOOST_AUTO_TEST_SUITE(TensorflowLiteParser)
uint32_t m_PadBottom
Padding bottom value in the height dimension.
bool m_BiasEnabled
Enable/disable bias.
virtual unsigned int GetNumOutputSlots() const =0
Returns the number of connectable output slots.
bool m_ProjectionEnabled
Enable/disable the projection layer.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
const ConstTensor * m_ProjectionWeights
Definition: LstmParams.hpp:55
float Dequantize(QuantizedType value, float scale, int32_t offset)
Dequantize an 8-bit data type into a floating point data type.
Definition: TypesUtils.cpp:47
A ViewsDescriptor for the SplitterLayer.
Interface for a layer that is connectable to other layers via InputSlots and OutputSlots.
Definition: INetwork.hpp:61
float m_ScaleW
Center size encoding scale weight.
const ConstTensor * m_CellBias
Definition: LstmParams.hpp:53
bool m_BiasEnabled
Enable/disable bias.
virtual unsigned int GetNumInputSlots() const =0
Returns the number of connectable input slots.
void Slice(const TensorInfo &inputInfo, const SliceDescriptor &descriptor, const void *inputData, void *outputData, unsigned int dataTypeSize)
Definition: Slice.cpp:16
A TransposeConvolution2dDescriptor for the TransposeConvolution2dLayer.
const TensorShape & GetShape() const
Definition: Tensor.hpp:187
const ConstTensor * m_RecurrentToOutputWeights
uint32_t m_PadBottom
Padding bottom value in the height dimension.
float m_ClippingThresProj
Clipping threshold value for the projection.
A ReshapeDescriptor for the ReshapeLayer.
void ArgMinMax(Decoder< float > &in, int32_t *out, const TensorInfo &inputTensorInfo, const TensorInfo &outputTensorInfo, ArgMinMaxFunction function, int axis)
Definition: ArgMinMax.cpp:15
const ConstTensor * m_CellToOutputWeights
Definition: LstmParams.hpp:50
#define ARMNN_NO_DEPRECATE_WARN_BEGIN
Definition: Deprecated.hpp:33
const ConstTensor * m_RecurrentToForgetWeights
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
void Fill(Encoder< float > &output, const TensorShape &desiredOutputShape, const float value)
Creates a tensor and fills it with a scalar value.
Definition: Fill.cpp:13
A ComparisonDescriptor for the ComparisonLayer.
Definition: Descriptors.hpp:70
float m_ScaleX
Center size encoding scale x.
uint32_t m_TargetWidth
Target width value.
bool m_TransposeWeightMatrix
Enable/disable transpose weight matrix.
bool m_PeepholeEnabled
Enable/disable peephole.
A Convolution2dDescriptor for the Convolution2dLayer.
uint32_t m_PadLeft
Padding left value in the width dimension.
float m_HiddenStateScale
Hidden State quantization scale.
bool m_BiasEnabled
Enable/disable bias.
const ConstTensor * m_CellToInputWeights
Definition: LstmParams.hpp:48
const TensorShape & GetShape() const
Definition: Tensor.hpp:268
float m_OutputIntermediateScale
Output intermediate quantization scale.
ResizeMethod m_Method
The Interpolation method to use (Bilinear, NearestNeighbor).
float m_Gamma
Gamma, the scale scalar value applied for the normalized tensor. Defaults to 1.0. ...
float m_Beta
Exponentiation value.
The padding fields don't count and are ignored.
float m_Eps
Value to add to the variance. Used to avoid dividing by zero.
const ConstTensor * m_InputGateBias
Definition: LstmParams.hpp:51
ArgMinMaxFunction m_Function
Specify if the function is to find Min or Max.
Definition: Descriptors.hpp:64
uint32_t m_DetectionsPerClass
Detections per classes, used in Regular NMS.
armnn::TensorInfo anchorsInfo({ 6, 4 }, armnn::DataType::Float32)
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
Main network class which provides the interface for building up a neural network. ...
Definition: INetwork.hpp:105
unsigned int GetNumElements() const
Definition: Tensor.hpp:274
const ConstTensor * m_RecurrentToCellWeights
Definition: LstmParams.hpp:46
void Serialize(const armnn::INetwork &inNetwork) override
Serializes the network to ArmNN SerializedGraph.
void Transpose(const armnn::TensorShape &dstShape, const armnn::PermutationVector &mappings, const void *src, void *dst, size_t dataTypeSize)
Definition: Transpose.cpp:120
void DepthToSpace(const TensorInfo &inputInfo, const DepthToSpaceDescriptor &descriptor, const void *inputData, void *outputData, unsigned int dataTypeSize)
uint32_t m_PadRight
Padding right value in the width dimension.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
MemoryType GetMemoryArea() const
Definition: Tensor.hpp:276
const ConstTensor * m_ForgetLayerNormWeights
Definition: LstmParams.hpp:58
const ConstTensor * m_CellToForgetWeights
Definition: LstmParams.hpp:49
Copyright (c) 2020 ARM Limited.
uint32_t m_PadBottom
Padding bottom value in the height dimension.
uint32_t m_DilationY
Dilation along y axis.
int32_t m_EndMask
End mask value.
A SpaceToDepthDescriptor for the SpaceToDepthLayer.
uint32_t m_DilationY
Dilation factor value for height dimension.
#define DECLARE_LAYER_VERIFIER_CLASS(name)
A BatchToSpaceNdDescriptor for the BatchToSpaceNdLayer.
int LayerBindingId
Type of identifiers for bindable layers (inputs, outputs).
Definition: Types.hpp:194
const ConstTensor * m_OutputGateBias
Definition: LstmParams.hpp:54
void Stack(const StackQueueDescriptor &data, std::vector< std::unique_ptr< Decoder< float >>> &inputs, Encoder< float > &output)
Definition: Stack.cpp:12
virtual void SetTensorInfo(const TensorInfo &tensorInfo)=0
constexpr const char * GetDataTypeName(DataType dataType)
Definition: TypesUtils.hpp:168
A ResizeDescriptor for the ResizeLayer.
BOOST_AUTO_TEST_CASE(SerializeAddition)
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
uint32_t m_MaxClassesPerDetection
Maximum numbers of classes per detection, used in Fast NMS.
std::vector< unsigned int > m_Axis
Values for the dimensions to reduce.
A StackDescriptor for the StackLayer.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
uint32_t m_PadTop
Padding top value in the height dimension.
uint32_t m_MaxDetections
Maximum numbers of detections.
A PadDescriptor for the PadLayer.
void Permute(const armnn::TensorShape &dstShape, const armnn::PermutationVector &mappings, const void *src, void *dst, size_t dataTypeSize)
Definition: Permute.cpp:131
uint32_t m_StrideX
Stride value when proceeding through input for the width dimension.
const ConstTensor * m_InputLayerNormWeights
Definition: LstmParams.hpp:57
uint32_t m_StrideX
Stride value when proceeding through input for the width dimension.
bool m_LayerNormEnabled
Enable/disable layer normalization.
DataType
Definition: Types.hpp:32
float m_NmsIouThreshold
Intersection over union threshold.
const ConstTensor * m_RecurrentToOutputWeights
Definition: LstmParams.hpp:47
An LstmDescriptor for the LstmLayer.
#define ARMNN_NO_DEPRECATE_WARN_END
Definition: Deprecated.hpp:34
uint32_t m_DilationX
Dilation factor value for width dimension.
uint32_t m_PadTop
Padding top value in the height dimension.
Status SetViewSize(uint32_t view, uint32_t coord, uint32_t value)
Set the size of the views.
An output connection slot for a layer.
Definition: INetwork.hpp:37
A L2NormalizationDescriptor for the L2NormalizationLayer.
const ConstTensor * m_ProjectionBias
Definition: LstmParams.hpp:56
int32_t GetQuantizationOffset() const
Definition: Tensor.cpp:470
const ConstTensor * m_InputToForgetWeights
An ArgMinMaxDescriptor for ArgMinMaxLayer.
Definition: Descriptors.hpp:51
float GetQuantizationScale() const
Definition: Tensor.cpp:453
DataType GetDataType() const
Definition: Tensor.hpp:194
An OriginsDescriptor for the ConcatLayer.
float m_ProjectionClip
Clipping threshold value for the projection.
bool has_value() const noexcept
Definition: Optional.hpp:53
A FullyConnectedDescriptor for the FullyConnectedLayer.
bool m_BiasEnabled
Enable/disable bias.
A tensor defined by a TensorInfo (shape and data type) and an immutable backing store.
Definition: Tensor.hpp:298
float m_InputIntermediateScale
Input intermediate quantization scale.
uint32_t m_TargetWidth
Target width value.
A GatherDescriptor for the GatherLayer.
bool m_PeepholeEnabled
Enable/disable peephole.
uint32_t m_NumClasses
Number of classes.
bool m_HalfPixelCenters
Half Pixel Centers.
uint32_t m_PadTop
Padding top value in the height dimension.
A StandInDescriptor for the StandIn layer.
A QLstmDescriptor for the QLstmLayer.
QuantizedType Quantize(float value, float scale, int32_t offset)
Quantize a floating point data type into an 8-bit data type.
Definition: TypesUtils.cpp:31
bool m_UseRegularNms
Use Regular NMS.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
void LogSoftmax(Decoder< float > &input, Encoder< float > &output, const TensorInfo &inputInfo, const LogSoftmaxDescriptor &descriptor)
Definition: LogSoftmax.cpp:30
const TensorInfo & GetInfo() const
Definition: Tensor.hpp:266
uint32_t m_TargetHeight
Target height value.
uint32_t m_ActivationFunc
The activation function to use.
A SliceDescriptor for the SliceLayer.
Visitor base class with empty implementations.
uint32_t m_StrideY
Stride value when proceeding through input for the height dimension.
void SpaceToBatchNd(const TensorInfo &inputInfo, const TensorInfo &outputInfo, const SpaceToBatchNdDescriptor &params, Decoder< float > &inputData, Encoder< float > &outputData)
bool SaveSerializedToStream(std::ostream &stream) override
Serializes the SerializedGraph to the stream.
const ConstTensor * m_RecurrentToInputWeights
float m_ClippingThresCell
Clipping threshold value for the cell state.
unsigned int m_BlockSize
Scalar specifying the input block size. It must be >= 1.
float m_ForgetIntermediateScale
Forget intermediate quantization scale.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
float m_ScaleH
Center size encoding scale height.
ComparisonOperation m_Operation
Specifies the comparison operation to execute.
Definition: Descriptors.hpp:86
const ConstTensor * m_CellLayerNormWeights
Definition: LstmParams.hpp:59
const ConstTensor * m_ForgetGateBias
Definition: LstmParams.hpp:52
A SpaceToBatchNdDescriptor for the SpaceToBatchNdLayer.
const ConstTensor * m_InputToCellWeights
Definition: LstmParams.hpp:42
const ConstTensor * m_InputToOutputWeights
Definition: LstmParams.hpp:43
float m_CellClip
Clipping threshold value for the cell state.
uint32_t m_DilationX
Dilation along x axis.
BOOST_AUTO_TEST_SUITE_END()
bool m_CifgEnabled
Enable/disable cifg (coupled input & forget gate).
uint32_t m_PadLeft
Padding left value in the width dimension.
void StridedSlice(const TensorInfo &inputInfo, const StridedSliceDescriptor &params, const void *inputData, void *outputData, unsigned int dataTypeSize)
bool m_AlignCorners
Aligned corners.
uint32_t m_StrideX
Stride value when proceeding through input for the width dimension.
int32_t m_Axis
The axis in params to gather indices from.
const ConstTensor * m_RecurrentToForgetWeights
Definition: LstmParams.hpp:45
uint32_t m_StrideY
Stride value when proceeding through input for the height dimension.
uint32_t m_StrideY
Stride value when proceeding through input for the height dimension.
Base class for all ArmNN exceptions so that users can filter to just those.
Definition: Exceptions.hpp:46
bool m_ProjectionEnabled
Enable/disable the projection layer.
const ConstTensor * m_RecurrentToCellWeights
virtual const IInputSlot & GetInputSlot(unsigned int index) const =0
Get a const input slot handle by slot index.
const ConstTensor * m_RecurrentToInputWeights
Definition: LstmParams.hpp:44
A MeanDescriptor for the MeanLayer.
const ConstTensor * m_InputToOutputWeights
void Mean(const armnn::TensorInfo &inputInfo, const armnn::TensorInfo &outputInfo, const std::vector< unsigned int > &axis, Decoder< float > &input, Encoder< float > &output)
Definition: Mean.cpp:71
virtual const IOutputSlot * GetConnection() const =0
bool m_LayerNormEnabled
Enable/disable layer normalization.
uint32_t m_PadRight
Padding right value in the width dimension.
A TransposeDescriptor for the TransposeLayer.
A StridedSliceDescriptor for the StridedSliceLayer.
virtual const TensorInfo & GetTensorInfo() const =0
virtual const IOutputSlot & GetOutputSlot(unsigned int index) const =0
Get the const output slot handle by slot index.
void SpaceToDepth(const TensorInfo &inputInfo, const TensorInfo &outputInfo, const SpaceToDepthDescriptor &params, Decoder< float > &inputData, Encoder< float > &outputData)
float m_ScaleY
Center size encoding scale y.
OriginsDescriptor CreateDescriptorForConcatenation(TensorShapeIt first, TensorShapeIt last, unsigned int concatenationDimension)
Convenience template to create an OriginsDescriptor to use when creating a ConcatLayer for performing...
#define DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(name)
float m_NmsScoreThreshold
NMS score threshold.
std::unique_ptr< INetwork, void(*)(INetwork *network)> INetworkPtr
Definition: INetwork.hpp:101
void BatchToSpaceNd(const DataLayoutIndexed &dataLayout, const TensorInfo &inputTensorInfo, const TensorInfo &outputTensorInfo, const std::vector< unsigned int > &blockShape, const std::vector< std::pair< unsigned int, unsigned int >> &cropsData, Decoder< float > &inputDecoder, Encoder< float > &outputEncoder)
void Pad(const TensorInfo &inputInfo, const TensorInfo &outputInfo, const PadQueueDescriptor &data)
Definition: Pad.cpp:39
virtual int Connect(IInputSlot &destination)=0
DataType GetDataType() const
Definition: Tensor.hpp:271
A Pooling2dDescriptor for the Pooling2dLayer.
const ConstTensor * m_OutputLayerNormWeights
Definition: LstmParams.hpp:60
A NormalizationDescriptor for the NormalizationLayer.
void Pooling2d(Decoder< float > &rInputDecoder, Encoder< float > &rOutputEncoder, const TensorInfo &inputInfo, const TensorInfo &outputInfo, const Pooling2dDescriptor &params)
Computes the Pooling2d operation.
Definition: Pooling2d.cpp:143
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
An InstanceNormalizationDescriptor for InstanceNormalizationLayer.
A ResizeBilinearDescriptor for the ResizeBilinearLayer.
void Splitter(const SplitterQueueDescriptor &data)
Definition: Splitter.hpp:17
float m_CellIntermediateScale
Cell intermediate quantization scale.
void Softmax(Decoder< float > &in, Encoder< float > &out, const TensorInfo &inputTensorInfo, float beta, int axis)
Computes the softmax function on some inputs, into outputs, with a shape given by tensorInfo...
Definition: Softmax.cpp:17
static INetworkPtr Create(NetworkOptions networkOptions={})
Definition: Network.cpp:50
A SoftmaxDescriptor for the SoftmaxLayer.
bool m_CifgEnabled
Enable/disable CIFG (coupled input & forget gate).
Status SetViewOriginCoord(uint32_t view, uint32_t coord, uint32_t value)
Set the view origin coordinates.
void Resize(Decoder< float > &in, const TensorInfo &inputInfo, Encoder< float > &out, const TensorInfo &outputInfo, DataLayoutIndexed dataLayout, armnn::ResizeMethod resizeMethod, bool alignCorners, bool halfPixelCenters)
Definition: Resize.cpp:65
A DepthwiseConvolution2dDescriptor for the DepthwiseConvolution2dLayer.
A FillDescriptor for the FillLayer.
A BatchNormalizationDescriptor for the BatchNormalizationLayer.
uint32_t m_PadLeft
Padding left value in the width dimension.
unsigned int GetNumBytes() const
Definition: Tensor.hpp:273
const ConstTensor * m_InputToForgetWeights
Definition: LstmParams.hpp:41
A PermuteDescriptor for the PermuteLayer.
uint32_t m_PadRight
Padding right value in the width dimension.
int32_t m_HiddenStateZeroPoint
Hidden State zero point.
const ConstTensor * m_InputToInputWeights
Definition: LstmParams.hpp:40
std::vector< float > anchors({ 0.5f, 0.5f, 1.0f, 1.0f, 0.5f, 0.5f, 1.0f, 1.0f, 0.5f, 0.5f, 1.0f, 1.0f, 0.5f, 10.5f, 1.0f, 1.0f, 0.5f, 10.5f, 1.0f, 1.0f, 0.5f, 100.5f, 1.0f, 1.0f })