From b454c5c65efb238c130b042ace390b2bc7f0bf75 Mon Sep 17 00:00:00 2001
From: Finn Williams
Date: Tue, 9 Feb 2021 15:56:23 +0000
Subject: IVGCVSW-4893 Refactor ILayerVisitor using unified interface strategy.

Signed-off-by: Jan Eilers
Signed-off-by: Finn Williams
Signed-off-by: Francis Murtagh
Change-Id: Id7bc8255a8e3f9e5aac65d510bec8a559bf37246
---
 .../test/ActivationSerializationTests.cpp        |   19 +-
 .../test/ComparisonSerializationTests.cpp        |  123 +
 .../test/LstmSerializationTests.cpp              | 2199 +++++++++++++
 src/armnnSerializer/test/SerializerTestUtils.cpp |  163 +
 src/armnnSerializer/test/SerializerTestUtils.hpp |  167 +
 src/armnnSerializer/test/SerializerTests.cpp     | 3461 ++------------------
 6 files changed, 3008 insertions(+), 3124 deletions(-)
 create mode 100644 src/armnnSerializer/test/ComparisonSerializationTests.cpp
 create mode 100644 src/armnnSerializer/test/LstmSerializationTests.cpp
 create mode 100644 src/armnnSerializer/test/SerializerTestUtils.cpp
 create mode 100644 src/armnnSerializer/test/SerializerTestUtils.hpp

diff --git a/src/armnnSerializer/test/ActivationSerializationTests.cpp b/src/armnnSerializer/test/ActivationSerializationTests.cpp
index 1645731413..fbe1ae0ad4 100644
--- a/src/armnnSerializer/test/ActivationSerializationTests.cpp
+++ b/src/armnnSerializer/test/ActivationSerializationTests.cpp
@@ -17,15 +17,20 @@
 BOOST_AUTO_TEST_SUITE(SerializerTests)
 
-class VerifyActivationName : public armnn::LayerVisitorBase<armnn::VisitorNoThrowPolicy>
+class VerifyActivationName : public armnn::IStrategy
 {
 public:
-    void VisitActivationLayer(const armnn::IConnectableLayer* layer,
-                              const armnn::ActivationDescriptor& activationDescriptor,
-                              const char* name) override
+    void ExecuteStrategy(const armnn::IConnectableLayer* layer,
+                         const armnn::BaseDescriptor& descriptor,
+                         const std::vector<armnn::ConstTensor>& constants,
+                         const char* name,
+                         const armnn::LayerBindingId id = 0) override
     {
-        IgnoreUnused(layer, activationDescriptor);
-        BOOST_TEST(name == "activation");
+        IgnoreUnused(layer, descriptor, constants, id);
+        if (layer->GetType() == armnn::LayerType::Activation)
+        {
+            BOOST_TEST(name == "activation");
+        }
     }
 };
 
@@ -67,7 +72,7 @@ BOOST_AUTO_TEST_CASE(ActivationSerialization)
     armnn::INetworkPtr deserializedNetwork = parser->CreateNetworkFromBinary(serializerVector);
 
     VerifyActivationName visitor;
-    deserializedNetwork->Accept(visitor);
+    deserializedNetwork->ExecuteStrategy(visitor);
 
     armnn::IRuntime::CreationOptions options; // default options
     armnn::IRuntimePtr run = armnn::IRuntime::Create(options);
diff --git a/src/armnnSerializer/test/ComparisonSerializationTests.cpp b/src/armnnSerializer/test/ComparisonSerializationTests.cpp
new file mode 100644
index 0000000000..3aee9a7bcb
--- /dev/null
+++ b/src/armnnSerializer/test/ComparisonSerializationTests.cpp
@@ -0,0 +1,123 @@
+//
+// Copyright © 2021 Arm Ltd and Contributors. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+
+#include "../Serializer.hpp"
+#include "SerializerTestUtils.hpp"
+
+#include
+#include
+#include
+#include
+#include
+
+#include
+
+
+BOOST_AUTO_TEST_SUITE(SerializerTests)
+
+struct ComparisonModel
+{
+    ComparisonModel(const std::string& layerName,
+                    const armnn::TensorInfo& inputInfo,
+                    const armnn::TensorInfo& outputInfo,
+                    armnn::ComparisonDescriptor& descriptor)
+        : m_network(armnn::INetwork::Create())
+    {
+        armnn::IConnectableLayer* const inputLayer0 = m_network->AddInputLayer(0);
+        armnn::IConnectableLayer* const inputLayer1 = m_network->AddInputLayer(1);
+        armnn::IConnectableLayer* const equalLayer  = m_network->AddComparisonLayer(descriptor, layerName.c_str());
+        armnn::IConnectableLayer* const outputLayer = m_network->AddOutputLayer(0);
+
+        inputLayer0->GetOutputSlot(0).Connect(equalLayer->GetInputSlot(0));
+        inputLayer1->GetOutputSlot(0).Connect(equalLayer->GetInputSlot(1));
+        equalLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
+
+        inputLayer0->GetOutputSlot(0).SetTensorInfo(inputInfo);
+        inputLayer1->GetOutputSlot(0).SetTensorInfo(inputInfo);
+        equalLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
+    }
+
+    armnn::INetworkPtr m_network;
+};
+
+class ComparisonLayerVerifier : public LayerVerifierBase
+{
+public:
+    ComparisonLayerVerifier(const std::string& layerName,
+                            const std::vector<armnn::TensorInfo>& inputInfos,
+                            const std::vector<armnn::TensorInfo>& outputInfos,
+                            const armnn::ComparisonDescriptor& descriptor)
+        : LayerVerifierBase(layerName, inputInfos, outputInfos)
+        , m_Descriptor (descriptor) {}
+
+    void ExecuteStrategy(const armnn::IConnectableLayer* layer,
+                         const armnn::BaseDescriptor& descriptor,
+                         const std::vector<armnn::ConstTensor>& constants,
+                         const char* name,
+                         const armnn::LayerBindingId id = 0) override
+    {
+        armnn::IgnoreUnused(descriptor, constants, id);
+        switch (layer->GetType())
+        {
+            case armnn::LayerType::Input: break;
+            case armnn::LayerType::Output: break;
+            case armnn::LayerType::Comparison:
+            {
+                VerifyNameAndConnections(layer, name);
+                const armnn::ComparisonDescriptor& layerDescriptor =
+                        static_cast<const armnn::ComparisonDescriptor&>(descriptor);
+                BOOST_CHECK(layerDescriptor.m_Operation == m_Descriptor.m_Operation);
+                break;
+            }
+            default:
+            {
+                throw armnn::Exception("Unexpected layer type in Comparison test model");
+            }
+        }
+    }
+
+private:
+    armnn::ComparisonDescriptor m_Descriptor;
+};
+
+BOOST_AUTO_TEST_CASE(SerializeEqual)
+{
+    const std::string layerName("equal");
+
+    const armnn::TensorShape shape{2, 1, 2, 4};
+    const armnn::TensorInfo inputInfo  = armnn::TensorInfo(shape, armnn::DataType::Float32);
+    const armnn::TensorInfo outputInfo = armnn::TensorInfo(shape, armnn::DataType::Boolean);
+
+    armnn::ComparisonDescriptor descriptor (armnn::ComparisonOperation::Equal);
+
+    ComparisonModel model(layerName, inputInfo, outputInfo, descriptor);
+
+    armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*model.m_network));
+    BOOST_CHECK(deserializedNetwork);
+
+    ComparisonLayerVerifier verifier(layerName, { inputInfo, inputInfo }, { outputInfo }, descriptor);
+    deserializedNetwork->ExecuteStrategy(verifier);
+}
+
+BOOST_AUTO_TEST_CASE(SerializeGreater)
+{
+    const std::string layerName("greater");
+
+    const armnn::TensorShape shape{2, 1, 2, 4};
+    const armnn::TensorInfo inputInfo  = armnn::TensorInfo(shape, armnn::DataType::Float32);
+    const armnn::TensorInfo outputInfo = armnn::TensorInfo(shape, armnn::DataType::Boolean);
+
+    armnn::ComparisonDescriptor descriptor (armnn::ComparisonOperation::Greater);
+
+    ComparisonModel model(layerName, inputInfo,
outputInfo, descriptor); + + armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*model.m_network)); + BOOST_CHECK(deserializedNetwork); + + ComparisonLayerVerifier verifier(layerName, { inputInfo, inputInfo }, { outputInfo }, descriptor); + deserializedNetwork->ExecuteStrategy(verifier); +} + +BOOST_AUTO_TEST_SUITE_END() diff --git a/src/armnnSerializer/test/LstmSerializationTests.cpp b/src/armnnSerializer/test/LstmSerializationTests.cpp new file mode 100644 index 0000000000..4705c0bd28 --- /dev/null +++ b/src/armnnSerializer/test/LstmSerializationTests.cpp @@ -0,0 +1,2199 @@ +// +// Copyright © 2021 Arm Ltd and Contributors. All rights reserved. +// SPDX-License-Identifier: MIT +// + +#include "../Serializer.hpp" +#include "SerializerTestUtils.hpp" + +#include +#include +#include +#include +#include +#include +#include + +#include + +#include + + +BOOST_AUTO_TEST_SUITE(SerializerTests) + +template +armnn::LstmInputParams ConstantVector2LstmInputParams(const std::vector& constants, + Descriptor& descriptor) +{ + armnn::LstmInputParams lstmInputParams; + size_t i = 0; + + // Inserting basic paramters + lstmInputParams.m_InputToForgetWeights = &constants[i++]; + lstmInputParams.m_InputToCellWeights = &constants[i++]; + lstmInputParams.m_InputToOutputWeights = &constants[i++]; + lstmInputParams.m_RecurrentToForgetWeights = &constants[i++]; + lstmInputParams.m_RecurrentToCellWeights = &constants[i++]; + lstmInputParams.m_RecurrentToOutputWeights = &constants[i++]; + lstmInputParams.m_ForgetGateBias = &constants[i++]; + lstmInputParams.m_CellBias = &constants[i++]; + lstmInputParams.m_OutputGateBias = &constants[i++]; + if (!descriptor.m_CifgEnabled) + { + lstmInputParams.m_InputToInputWeights = &constants[i++]; + lstmInputParams.m_RecurrentToInputWeights = &constants[i++]; + lstmInputParams.m_InputGateBias = &constants[i++]; + } + + if (descriptor.m_PeepholeEnabled) + { + if (!descriptor.m_CifgEnabled) + { + lstmInputParams.m_CellToInputWeights = &constants[i++]; + } + lstmInputParams.m_CellToForgetWeights = &constants[i++]; + lstmInputParams.m_CellToOutputWeights = &constants[i++]; + } + + if (descriptor.m_ProjectionEnabled) + { + lstmInputParams.m_ProjectionWeights = &constants[i++]; + lstmInputParams.m_ProjectionBias = &constants[i++]; + } + + if (descriptor.m_LayerNormEnabled) + { + if (!descriptor.m_CifgEnabled) + { + lstmInputParams.m_InputLayerNormWeights = &constants[i++]; + } + lstmInputParams.m_ForgetLayerNormWeights = &constants[i++]; + lstmInputParams.m_CellLayerNormWeights = &constants[i++]; + lstmInputParams.m_OutputLayerNormWeights = &constants[i++]; + } + + return lstmInputParams; +} + +// Works for Lstm and QLstm (QuantizedLstm uses different parameters) +template +class VerifyLstmLayer : public LayerVerifierBaseWithDescriptor +{ +public: + VerifyLstmLayer(const std::string& layerName, + const std::vector& inputInfos, + const std::vector& outputInfos, + const Descriptor& descriptor, + const armnn::LstmInputParams& inputParams) + : LayerVerifierBaseWithDescriptor(layerName, inputInfos, outputInfos, descriptor) + , m_InputParams(inputParams) {} + + void ExecuteStrategy(const armnn::IConnectableLayer* layer, + const armnn::BaseDescriptor& descriptor, + const std::vector& constants, + const char* name, + const armnn::LayerBindingId id = 0) override + { + armnn::IgnoreUnused(constants, id); + switch (layer->GetType()) + { + case armnn::LayerType::Input: break; + case armnn::LayerType::Output: break; + case armnn::LayerType::Lstm: + { + 
this->VerifyNameAndConnections(layer, name); + const Descriptor& internalDescriptor = static_cast(descriptor); + this->VerifyDescriptor(internalDescriptor); + armnn::LstmInputParams lstmParams = ConstantVector2LstmInputParams(constants, internalDescriptor); + VerifyInputParameters(lstmParams); + break; + } + case armnn::LayerType::QLstm: + { + this->VerifyNameAndConnections(layer, name); + const Descriptor& internalDescriptor = static_cast(descriptor); + this->VerifyDescriptor(internalDescriptor); + armnn::LstmInputParams lstmParams = ConstantVector2LstmInputParams(constants, internalDescriptor); + VerifyInputParameters(lstmParams); + break; + } + default: + { + throw armnn::Exception("Unexpected layer type in Lstm test model"); + } + } + } + +protected: + void VerifyInputParameters(const armnn::LstmInputParams& params) + { + this->VerifyConstTensors( + "m_InputToInputWeights", m_InputParams.m_InputToInputWeights, params.m_InputToInputWeights); + this->VerifyConstTensors( + "m_InputToForgetWeights", m_InputParams.m_InputToForgetWeights, params.m_InputToForgetWeights); + this->VerifyConstTensors( + "m_InputToCellWeights", m_InputParams.m_InputToCellWeights, params.m_InputToCellWeights); + this->VerifyConstTensors( + "m_InputToOutputWeights", m_InputParams.m_InputToOutputWeights, params.m_InputToOutputWeights); + this->VerifyConstTensors( + "m_RecurrentToInputWeights", m_InputParams.m_RecurrentToInputWeights, params.m_RecurrentToInputWeights); + this->VerifyConstTensors( + "m_RecurrentToForgetWeights", m_InputParams.m_RecurrentToForgetWeights, params.m_RecurrentToForgetWeights); + this->VerifyConstTensors( + "m_RecurrentToCellWeights", m_InputParams.m_RecurrentToCellWeights, params.m_RecurrentToCellWeights); + this->VerifyConstTensors( + "m_RecurrentToOutputWeights", m_InputParams.m_RecurrentToOutputWeights, params.m_RecurrentToOutputWeights); + this->VerifyConstTensors( + "m_CellToInputWeights", m_InputParams.m_CellToInputWeights, params.m_CellToInputWeights); + this->VerifyConstTensors( + "m_CellToForgetWeights", m_InputParams.m_CellToForgetWeights, params.m_CellToForgetWeights); + this->VerifyConstTensors( + "m_CellToOutputWeights", m_InputParams.m_CellToOutputWeights, params.m_CellToOutputWeights); + this->VerifyConstTensors( + "m_InputGateBias", m_InputParams.m_InputGateBias, params.m_InputGateBias); + this->VerifyConstTensors( + "m_ForgetGateBias", m_InputParams.m_ForgetGateBias, params.m_ForgetGateBias); + this->VerifyConstTensors( + "m_CellBias", m_InputParams.m_CellBias, params.m_CellBias); + this->VerifyConstTensors( + "m_OutputGateBias", m_InputParams.m_OutputGateBias, params.m_OutputGateBias); + this->VerifyConstTensors( + "m_ProjectionWeights", m_InputParams.m_ProjectionWeights, params.m_ProjectionWeights); + this->VerifyConstTensors( + "m_ProjectionBias", m_InputParams.m_ProjectionBias, params.m_ProjectionBias); + this->VerifyConstTensors( + "m_InputLayerNormWeights", m_InputParams.m_InputLayerNormWeights, params.m_InputLayerNormWeights); + this->VerifyConstTensors( + "m_ForgetLayerNormWeights", m_InputParams.m_ForgetLayerNormWeights, params.m_ForgetLayerNormWeights); + this->VerifyConstTensors( + "m_CellLayerNormWeights", m_InputParams.m_CellLayerNormWeights, params.m_CellLayerNormWeights); + this->VerifyConstTensors( + "m_OutputLayerNormWeights", m_InputParams.m_OutputLayerNormWeights, params.m_OutputLayerNormWeights); + } + +private: + armnn::LstmInputParams m_InputParams; +}; + +BOOST_AUTO_TEST_CASE(SerializeDeserializeLstmCifgPeepholeNoProjection) +{ + 
armnn::LstmDescriptor descriptor; + descriptor.m_ActivationFunc = 4; + descriptor.m_ClippingThresProj = 0.0f; + descriptor.m_ClippingThresCell = 0.0f; + descriptor.m_CifgEnabled = true; // if this is true then we DON'T need to set the OptCifgParams + descriptor.m_ProjectionEnabled = false; + descriptor.m_PeepholeEnabled = true; + + const uint32_t batchSize = 1; + const uint32_t inputSize = 2; + const uint32_t numUnits = 4; + const uint32_t outputSize = numUnits; + + armnn::TensorInfo inputWeightsInfo1({numUnits, inputSize}, armnn::DataType::Float32); + std::vector inputToForgetWeightsData = GenerateRandomData(inputWeightsInfo1.GetNumElements()); + armnn::ConstTensor inputToForgetWeights(inputWeightsInfo1, inputToForgetWeightsData); + + std::vector inputToCellWeightsData = GenerateRandomData(inputWeightsInfo1.GetNumElements()); + armnn::ConstTensor inputToCellWeights(inputWeightsInfo1, inputToCellWeightsData); + + std::vector inputToOutputWeightsData = GenerateRandomData(inputWeightsInfo1.GetNumElements()); + armnn::ConstTensor inputToOutputWeights(inputWeightsInfo1, inputToOutputWeightsData); + + armnn::TensorInfo inputWeightsInfo2({numUnits, outputSize}, armnn::DataType::Float32); + std::vector recurrentToForgetWeightsData = GenerateRandomData(inputWeightsInfo2.GetNumElements()); + armnn::ConstTensor recurrentToForgetWeights(inputWeightsInfo2, recurrentToForgetWeightsData); + + std::vector recurrentToCellWeightsData = GenerateRandomData(inputWeightsInfo2.GetNumElements()); + armnn::ConstTensor recurrentToCellWeights(inputWeightsInfo2, recurrentToCellWeightsData); + + std::vector recurrentToOutputWeightsData = GenerateRandomData(inputWeightsInfo2.GetNumElements()); + armnn::ConstTensor recurrentToOutputWeights(inputWeightsInfo2, recurrentToOutputWeightsData); + + armnn::TensorInfo inputWeightsInfo3({numUnits}, armnn::DataType::Float32); + std::vector cellToForgetWeightsData = GenerateRandomData(inputWeightsInfo3.GetNumElements()); + armnn::ConstTensor cellToForgetWeights(inputWeightsInfo3, cellToForgetWeightsData); + + std::vector cellToOutputWeightsData = GenerateRandomData(inputWeightsInfo3.GetNumElements()); + armnn::ConstTensor cellToOutputWeights(inputWeightsInfo3, cellToOutputWeightsData); + + std::vector forgetGateBiasData(numUnits, 1.0f); + armnn::ConstTensor forgetGateBias(inputWeightsInfo3, forgetGateBiasData); + + std::vector cellBiasData(numUnits, 0.0f); + armnn::ConstTensor cellBias(inputWeightsInfo3, cellBiasData); + + std::vector outputGateBiasData(numUnits, 0.0f); + armnn::ConstTensor outputGateBias(inputWeightsInfo3, outputGateBiasData); + + armnn::LstmInputParams params; + params.m_InputToForgetWeights = &inputToForgetWeights; + params.m_InputToCellWeights = &inputToCellWeights; + params.m_InputToOutputWeights = &inputToOutputWeights; + params.m_RecurrentToForgetWeights = &recurrentToForgetWeights; + params.m_RecurrentToCellWeights = &recurrentToCellWeights; + params.m_RecurrentToOutputWeights = &recurrentToOutputWeights; + params.m_ForgetGateBias = &forgetGateBias; + params.m_CellBias = &cellBias; + params.m_OutputGateBias = &outputGateBias; + params.m_CellToForgetWeights = &cellToForgetWeights; + params.m_CellToOutputWeights = &cellToOutputWeights; + + armnn::INetworkPtr network = armnn::INetwork::Create(); + armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0); + armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(1); + armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(2); + const std::string layerName("lstm"); + 
armnn::IConnectableLayer* const lstmLayer = network->AddLstmLayer(descriptor, params, layerName.c_str()); + armnn::IConnectableLayer* const scratchBuffer = network->AddOutputLayer(0); + armnn::IConnectableLayer* const outputStateOut = network->AddOutputLayer(1); + armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(2); + armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(3); + + // connect up + armnn::TensorInfo inputTensorInfo({ batchSize, inputSize }, armnn::DataType::Float32); + armnn::TensorInfo cellStateTensorInfo({ batchSize, numUnits}, armnn::DataType::Float32); + armnn::TensorInfo outputStateTensorInfo({ batchSize, outputSize }, armnn::DataType::Float32); + armnn::TensorInfo lstmTensorInfoScratchBuff({ batchSize, numUnits * 3 }, armnn::DataType::Float32); + + inputLayer->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(0)); + inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo); + + outputStateIn->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(1)); + outputStateIn->GetOutputSlot(0).SetTensorInfo(outputStateTensorInfo); + + cellStateIn->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(2)); + cellStateIn->GetOutputSlot(0).SetTensorInfo(cellStateTensorInfo); + + lstmLayer->GetOutputSlot(0).Connect(scratchBuffer->GetInputSlot(0)); + lstmLayer->GetOutputSlot(0).SetTensorInfo(lstmTensorInfoScratchBuff); + + lstmLayer->GetOutputSlot(1).Connect(outputStateOut->GetInputSlot(0)); + lstmLayer->GetOutputSlot(1).SetTensorInfo(outputStateTensorInfo); + + lstmLayer->GetOutputSlot(2).Connect(cellStateOut->GetInputSlot(0)); + lstmLayer->GetOutputSlot(2).SetTensorInfo(cellStateTensorInfo); + + lstmLayer->GetOutputSlot(3).Connect(outputLayer->GetInputSlot(0)); + lstmLayer->GetOutputSlot(3).SetTensorInfo(outputStateTensorInfo); + + armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); + BOOST_CHECK(deserializedNetwork); + + VerifyLstmLayer checker( + layerName, + {inputTensorInfo, outputStateTensorInfo, cellStateTensorInfo}, + {lstmTensorInfoScratchBuff, outputStateTensorInfo, cellStateTensorInfo, outputStateTensorInfo}, + descriptor, + params); + deserializedNetwork->ExecuteStrategy(checker); +} + +BOOST_AUTO_TEST_CASE(SerializeDeserializeLstmNoCifgWithPeepholeAndProjection) +{ + armnn::LstmDescriptor descriptor; + descriptor.m_ActivationFunc = 4; + descriptor.m_ClippingThresProj = 0.0f; + descriptor.m_ClippingThresCell = 0.0f; + descriptor.m_CifgEnabled = false; // if this is true then we DON'T need to set the OptCifgParams + descriptor.m_ProjectionEnabled = true; + descriptor.m_PeepholeEnabled = true; + + const uint32_t batchSize = 2; + const uint32_t inputSize = 5; + const uint32_t numUnits = 20; + const uint32_t outputSize = 16; + + armnn::TensorInfo tensorInfo20x5({numUnits, inputSize}, armnn::DataType::Float32); + std::vector inputToInputWeightsData = GenerateRandomData(tensorInfo20x5.GetNumElements()); + armnn::ConstTensor inputToInputWeights(tensorInfo20x5, inputToInputWeightsData); + + std::vector inputToForgetWeightsData = GenerateRandomData(tensorInfo20x5.GetNumElements()); + armnn::ConstTensor inputToForgetWeights(tensorInfo20x5, inputToForgetWeightsData); + + std::vector inputToCellWeightsData = GenerateRandomData(tensorInfo20x5.GetNumElements()); + armnn::ConstTensor inputToCellWeights(tensorInfo20x5, inputToCellWeightsData); + + std::vector inputToOutputWeightsData = GenerateRandomData(tensorInfo20x5.GetNumElements()); + armnn::ConstTensor inputToOutputWeights(tensorInfo20x5, inputToOutputWeightsData); + 
+ armnn::TensorInfo tensorInfo20({numUnits}, armnn::DataType::Float32); + std::vector inputGateBiasData = GenerateRandomData(tensorInfo20.GetNumElements()); + armnn::ConstTensor inputGateBias(tensorInfo20, inputGateBiasData); + + std::vector forgetGateBiasData = GenerateRandomData(tensorInfo20.GetNumElements()); + armnn::ConstTensor forgetGateBias(tensorInfo20, forgetGateBiasData); + + std::vector cellBiasData = GenerateRandomData(tensorInfo20.GetNumElements()); + armnn::ConstTensor cellBias(tensorInfo20, cellBiasData); + + std::vector outputGateBiasData = GenerateRandomData(tensorInfo20.GetNumElements()); + armnn::ConstTensor outputGateBias(tensorInfo20, outputGateBiasData); + + armnn::TensorInfo tensorInfo20x16({numUnits, outputSize}, armnn::DataType::Float32); + std::vector recurrentToInputWeightsData = GenerateRandomData(tensorInfo20x16.GetNumElements()); + armnn::ConstTensor recurrentToInputWeights(tensorInfo20x16, recurrentToInputWeightsData); + + std::vector recurrentToForgetWeightsData = GenerateRandomData(tensorInfo20x16.GetNumElements()); + armnn::ConstTensor recurrentToForgetWeights(tensorInfo20x16, recurrentToForgetWeightsData); + + std::vector recurrentToCellWeightsData = GenerateRandomData(tensorInfo20x16.GetNumElements()); + armnn::ConstTensor recurrentToCellWeights(tensorInfo20x16, recurrentToCellWeightsData); + + std::vector recurrentToOutputWeightsData = GenerateRandomData(tensorInfo20x16.GetNumElements()); + armnn::ConstTensor recurrentToOutputWeights(tensorInfo20x16, recurrentToOutputWeightsData); + + std::vector cellToInputWeightsData = GenerateRandomData(tensorInfo20.GetNumElements()); + armnn::ConstTensor cellToInputWeights(tensorInfo20, cellToInputWeightsData); + + std::vector cellToForgetWeightsData = GenerateRandomData(tensorInfo20.GetNumElements()); + armnn::ConstTensor cellToForgetWeights(tensorInfo20, cellToForgetWeightsData); + + std::vector cellToOutputWeightsData = GenerateRandomData(tensorInfo20.GetNumElements()); + armnn::ConstTensor cellToOutputWeights(tensorInfo20, cellToOutputWeightsData); + + armnn::TensorInfo tensorInfo16x20({outputSize, numUnits}, armnn::DataType::Float32); + std::vector projectionWeightsData = GenerateRandomData(tensorInfo16x20.GetNumElements()); + armnn::ConstTensor projectionWeights(tensorInfo16x20, projectionWeightsData); + + armnn::TensorInfo tensorInfo16({outputSize}, armnn::DataType::Float32); + std::vector projectionBiasData(outputSize, 0.f); + armnn::ConstTensor projectionBias(tensorInfo16, projectionBiasData); + + armnn::LstmInputParams params; + params.m_InputToForgetWeights = &inputToForgetWeights; + params.m_InputToCellWeights = &inputToCellWeights; + params.m_InputToOutputWeights = &inputToOutputWeights; + params.m_RecurrentToForgetWeights = &recurrentToForgetWeights; + params.m_RecurrentToCellWeights = &recurrentToCellWeights; + params.m_RecurrentToOutputWeights = &recurrentToOutputWeights; + params.m_ForgetGateBias = &forgetGateBias; + params.m_CellBias = &cellBias; + params.m_OutputGateBias = &outputGateBias; + + // additional params because: descriptor.m_CifgEnabled = false + params.m_InputToInputWeights = &inputToInputWeights; + params.m_RecurrentToInputWeights = &recurrentToInputWeights; + params.m_CellToInputWeights = &cellToInputWeights; + params.m_InputGateBias = &inputGateBias; + + // additional params because: descriptor.m_ProjectionEnabled = true + params.m_ProjectionWeights = &projectionWeights; + params.m_ProjectionBias = &projectionBias; + + // additional params because: descriptor.m_PeepholeEnabled = 
true + params.m_CellToForgetWeights = &cellToForgetWeights; + params.m_CellToOutputWeights = &cellToOutputWeights; + + armnn::INetworkPtr network = armnn::INetwork::Create(); + armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0); + armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(1); + armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(2); + const std::string layerName("lstm"); + armnn::IConnectableLayer* const lstmLayer = network->AddLstmLayer(descriptor, params, layerName.c_str()); + armnn::IConnectableLayer* const scratchBuffer = network->AddOutputLayer(0); + armnn::IConnectableLayer* const outputStateOut = network->AddOutputLayer(1); + armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(2); + armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(3); + + // connect up + armnn::TensorInfo inputTensorInfo({ batchSize, inputSize }, armnn::DataType::Float32); + armnn::TensorInfo cellStateTensorInfo({ batchSize, numUnits}, armnn::DataType::Float32); + armnn::TensorInfo outputStateTensorInfo({ batchSize, outputSize }, armnn::DataType::Float32); + armnn::TensorInfo lstmTensorInfoScratchBuff({ batchSize, numUnits * 4 }, armnn::DataType::Float32); + + inputLayer->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(0)); + inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo); + + outputStateIn->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(1)); + outputStateIn->GetOutputSlot(0).SetTensorInfo(outputStateTensorInfo); + + cellStateIn->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(2)); + cellStateIn->GetOutputSlot(0).SetTensorInfo(cellStateTensorInfo); + + lstmLayer->GetOutputSlot(0).Connect(scratchBuffer->GetInputSlot(0)); + lstmLayer->GetOutputSlot(0).SetTensorInfo(lstmTensorInfoScratchBuff); + + lstmLayer->GetOutputSlot(1).Connect(outputStateOut->GetInputSlot(0)); + lstmLayer->GetOutputSlot(1).SetTensorInfo(outputStateTensorInfo); + + lstmLayer->GetOutputSlot(2).Connect(cellStateOut->GetInputSlot(0)); + lstmLayer->GetOutputSlot(2).SetTensorInfo(cellStateTensorInfo); + + lstmLayer->GetOutputSlot(3).Connect(outputLayer->GetInputSlot(0)); + lstmLayer->GetOutputSlot(3).SetTensorInfo(outputStateTensorInfo); + + armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); + BOOST_CHECK(deserializedNetwork); + + VerifyLstmLayer checker( + layerName, + {inputTensorInfo, outputStateTensorInfo, cellStateTensorInfo}, + {lstmTensorInfoScratchBuff, outputStateTensorInfo, cellStateTensorInfo, outputStateTensorInfo}, + descriptor, + params); + deserializedNetwork->ExecuteStrategy(checker); +} + +BOOST_AUTO_TEST_CASE(SerializeDeserializeLstmNoCifgWithPeepholeWithProjectionWithLayerNorm) +{ + armnn::LstmDescriptor descriptor; + descriptor.m_ActivationFunc = 4; + descriptor.m_ClippingThresProj = 0.0f; + descriptor.m_ClippingThresCell = 0.0f; + descriptor.m_CifgEnabled = false; // if this is true then we DON'T need to set the OptCifgParams + descriptor.m_ProjectionEnabled = true; + descriptor.m_PeepholeEnabled = true; + descriptor.m_LayerNormEnabled = true; + + const uint32_t batchSize = 2; + const uint32_t inputSize = 5; + const uint32_t numUnits = 20; + const uint32_t outputSize = 16; + + armnn::TensorInfo tensorInfo20x5({numUnits, inputSize}, armnn::DataType::Float32); + std::vector inputToInputWeightsData = GenerateRandomData(tensorInfo20x5.GetNumElements()); + armnn::ConstTensor inputToInputWeights(tensorInfo20x5, inputToInputWeightsData); + + std::vector inputToForgetWeightsData = 
GenerateRandomData(tensorInfo20x5.GetNumElements()); + armnn::ConstTensor inputToForgetWeights(tensorInfo20x5, inputToForgetWeightsData); + + std::vector inputToCellWeightsData = GenerateRandomData(tensorInfo20x5.GetNumElements()); + armnn::ConstTensor inputToCellWeights(tensorInfo20x5, inputToCellWeightsData); + + std::vector inputToOutputWeightsData = GenerateRandomData(tensorInfo20x5.GetNumElements()); + armnn::ConstTensor inputToOutputWeights(tensorInfo20x5, inputToOutputWeightsData); + + armnn::TensorInfo tensorInfo20({numUnits}, armnn::DataType::Float32); + std::vector inputGateBiasData = GenerateRandomData(tensorInfo20.GetNumElements()); + armnn::ConstTensor inputGateBias(tensorInfo20, inputGateBiasData); + + std::vector forgetGateBiasData = GenerateRandomData(tensorInfo20.GetNumElements()); + armnn::ConstTensor forgetGateBias(tensorInfo20, forgetGateBiasData); + + std::vector cellBiasData = GenerateRandomData(tensorInfo20.GetNumElements()); + armnn::ConstTensor cellBias(tensorInfo20, cellBiasData); + + std::vector outputGateBiasData = GenerateRandomData(tensorInfo20.GetNumElements()); + armnn::ConstTensor outputGateBias(tensorInfo20, outputGateBiasData); + + armnn::TensorInfo tensorInfo20x16({numUnits, outputSize}, armnn::DataType::Float32); + std::vector recurrentToInputWeightsData = GenerateRandomData(tensorInfo20x16.GetNumElements()); + armnn::ConstTensor recurrentToInputWeights(tensorInfo20x16, recurrentToInputWeightsData); + + std::vector recurrentToForgetWeightsData = GenerateRandomData(tensorInfo20x16.GetNumElements()); + armnn::ConstTensor recurrentToForgetWeights(tensorInfo20x16, recurrentToForgetWeightsData); + + std::vector recurrentToCellWeightsData = GenerateRandomData(tensorInfo20x16.GetNumElements()); + armnn::ConstTensor recurrentToCellWeights(tensorInfo20x16, recurrentToCellWeightsData); + + std::vector recurrentToOutputWeightsData = GenerateRandomData(tensorInfo20x16.GetNumElements()); + armnn::ConstTensor recurrentToOutputWeights(tensorInfo20x16, recurrentToOutputWeightsData); + + std::vector cellToInputWeightsData = GenerateRandomData(tensorInfo20.GetNumElements()); + armnn::ConstTensor cellToInputWeights(tensorInfo20, cellToInputWeightsData); + + std::vector cellToForgetWeightsData = GenerateRandomData(tensorInfo20.GetNumElements()); + armnn::ConstTensor cellToForgetWeights(tensorInfo20, cellToForgetWeightsData); + + std::vector cellToOutputWeightsData = GenerateRandomData(tensorInfo20.GetNumElements()); + armnn::ConstTensor cellToOutputWeights(tensorInfo20, cellToOutputWeightsData); + + armnn::TensorInfo tensorInfo16x20({outputSize, numUnits}, armnn::DataType::Float32); + std::vector projectionWeightsData = GenerateRandomData(tensorInfo16x20.GetNumElements()); + armnn::ConstTensor projectionWeights(tensorInfo16x20, projectionWeightsData); + + armnn::TensorInfo tensorInfo16({outputSize}, armnn::DataType::Float32); + std::vector projectionBiasData(outputSize, 0.f); + armnn::ConstTensor projectionBias(tensorInfo16, projectionBiasData); + + std::vector inputLayerNormWeightsData = GenerateRandomData(tensorInfo20.GetNumElements()); + armnn::ConstTensor inputLayerNormWeights(tensorInfo20, forgetGateBiasData); + + std::vector forgetLayerNormWeightsData = GenerateRandomData(tensorInfo20.GetNumElements()); + armnn::ConstTensor forgetLayerNormWeights(tensorInfo20, forgetGateBiasData); + + std::vector cellLayerNormWeightsData = GenerateRandomData(tensorInfo20.GetNumElements()); + armnn::ConstTensor cellLayerNormWeights(tensorInfo20, forgetGateBiasData); + + std::vector 
outLayerNormWeightsData = GenerateRandomData(tensorInfo20.GetNumElements()); + armnn::ConstTensor outLayerNormWeights(tensorInfo20, forgetGateBiasData); + + armnn::LstmInputParams params; + params.m_InputToForgetWeights = &inputToForgetWeights; + params.m_InputToCellWeights = &inputToCellWeights; + params.m_InputToOutputWeights = &inputToOutputWeights; + params.m_RecurrentToForgetWeights = &recurrentToForgetWeights; + params.m_RecurrentToCellWeights = &recurrentToCellWeights; + params.m_RecurrentToOutputWeights = &recurrentToOutputWeights; + params.m_ForgetGateBias = &forgetGateBias; + params.m_CellBias = &cellBias; + params.m_OutputGateBias = &outputGateBias; + + // additional params because: descriptor.m_CifgEnabled = false + params.m_InputToInputWeights = &inputToInputWeights; + params.m_RecurrentToInputWeights = &recurrentToInputWeights; + params.m_CellToInputWeights = &cellToInputWeights; + params.m_InputGateBias = &inputGateBias; + + // additional params because: descriptor.m_ProjectionEnabled = true + params.m_ProjectionWeights = &projectionWeights; + params.m_ProjectionBias = &projectionBias; + + // additional params because: descriptor.m_PeepholeEnabled = true + params.m_CellToForgetWeights = &cellToForgetWeights; + params.m_CellToOutputWeights = &cellToOutputWeights; + + // additional params because: despriptor.m_LayerNormEnabled = true + params.m_InputLayerNormWeights = &inputLayerNormWeights; + params.m_ForgetLayerNormWeights = &forgetLayerNormWeights; + params.m_CellLayerNormWeights = &cellLayerNormWeights; + params.m_OutputLayerNormWeights = &outLayerNormWeights; + + armnn::INetworkPtr network = armnn::INetwork::Create(); + armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0); + armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(1); + armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(2); + const std::string layerName("lstm"); + armnn::IConnectableLayer* const lstmLayer = network->AddLstmLayer(descriptor, params, layerName.c_str()); + armnn::IConnectableLayer* const scratchBuffer = network->AddOutputLayer(0); + armnn::IConnectableLayer* const outputStateOut = network->AddOutputLayer(1); + armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(2); + armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(3); + + // connect up + armnn::TensorInfo inputTensorInfo({ batchSize, inputSize }, armnn::DataType::Float32); + armnn::TensorInfo cellStateTensorInfo({ batchSize, numUnits}, armnn::DataType::Float32); + armnn::TensorInfo outputStateTensorInfo({ batchSize, outputSize }, armnn::DataType::Float32); + armnn::TensorInfo lstmTensorInfoScratchBuff({ batchSize, numUnits * 4 }, armnn::DataType::Float32); + + inputLayer->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(0)); + inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo); + + outputStateIn->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(1)); + outputStateIn->GetOutputSlot(0).SetTensorInfo(outputStateTensorInfo); + + cellStateIn->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(2)); + cellStateIn->GetOutputSlot(0).SetTensorInfo(cellStateTensorInfo); + + lstmLayer->GetOutputSlot(0).Connect(scratchBuffer->GetInputSlot(0)); + lstmLayer->GetOutputSlot(0).SetTensorInfo(lstmTensorInfoScratchBuff); + + lstmLayer->GetOutputSlot(1).Connect(outputStateOut->GetInputSlot(0)); + lstmLayer->GetOutputSlot(1).SetTensorInfo(outputStateTensorInfo); + + lstmLayer->GetOutputSlot(2).Connect(cellStateOut->GetInputSlot(0)); + 
lstmLayer->GetOutputSlot(2).SetTensorInfo(cellStateTensorInfo); + + lstmLayer->GetOutputSlot(3).Connect(outputLayer->GetInputSlot(0)); + lstmLayer->GetOutputSlot(3).SetTensorInfo(outputStateTensorInfo); + + armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); + BOOST_CHECK(deserializedNetwork); + + VerifyLstmLayer checker( + layerName, + {inputTensorInfo, outputStateTensorInfo, cellStateTensorInfo}, + {lstmTensorInfoScratchBuff, outputStateTensorInfo, cellStateTensorInfo, outputStateTensorInfo}, + descriptor, + params); + deserializedNetwork->ExecuteStrategy(checker); +} + +BOOST_AUTO_TEST_CASE(EnsureLstmLayersBackwardCompatibility) +{ + // The hex data below is a flat buffer containing a lstm layer with no Cifg, with peephole and projection + // enabled. That data was obtained before additional layer normalization parameters where added to the + // lstm serializer. That way it can be tested if a lstm model with the old parameter configuration can + // still be loaded + const std::vector lstmNoCifgWithPeepholeAndProjectionModel = + { + 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00, + 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x2C, 0x00, 0x00, 0x00, 0x38, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, + 0xDC, 0x29, 0x00, 0x00, 0x38, 0x29, 0x00, 0x00, 0xB4, 0x28, 0x00, 0x00, 0x94, 0x01, 0x00, 0x00, 0x3C, 0x01, + 0x00, 0x00, 0xE0, 0x00, 0x00, 0x00, 0x84, 0x00, 0x00, 0x00, 0x28, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x04, 0x00, + 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00, 0x70, 0xD6, 0xFF, 0xFF, + 0x00, 0x00, 0x00, 0x0B, 0x04, 0x00, 0x00, 0x00, 0x06, 0xD7, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x88, 0xD7, + 0xFF, 0xFF, 0x08, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0xF6, 0xD6, 0xFF, 0xFF, 0x07, 0x00, 0x00, 0x00, + 0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0xE8, 0xD7, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0xC8, 0xD6, 0xFF, 0xFF, 0x00, 0x00, + 0x00, 0x0B, 0x04, 0x00, 0x00, 0x00, 0x5E, 0xD7, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0xE0, 0xD7, 0xFF, 0xFF, + 0x08, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x4E, 0xD7, 0xFF, 0xFF, 0x06, 0x00, 0x00, 0x00, 0x10, 0x00, + 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0xD8, + 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x20, 0xD7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B, + 0x04, 0x00, 0x00, 0x00, 0xB6, 0xD7, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x38, 0xD8, 0xFF, 0xFF, 0x08, 0x00, + 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0xA6, 0xD7, 0xFF, 0xFF, 0x05, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, + 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x98, 0xD8, 0xFF, 0xFF, + 0x03, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x78, 0xD7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B, 0x04, 0x00, + 0x00, 0x00, 0x0E, 0xD8, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x16, 0xD8, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, + 0xFA, 0xD7, 0xFF, 0xFF, 0x04, 0x00, 0x00, 
0x00, 0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, + 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xEC, 0xD8, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x6C, 0xD8, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x23, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, + 0x12, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00, 0x00, 0x00, 0xE0, 0x25, 0x00, 0x00, 0xD0, 0x25, + 0x00, 0x00, 0x2C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x26, 0x00, 0x48, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, + 0x10, 0x00, 0x14, 0x00, 0x18, 0x00, 0x1C, 0x00, 0x20, 0x00, 0x24, 0x00, 0x28, 0x00, 0x2C, 0x00, 0x30, 0x00, + 0x34, 0x00, 0x38, 0x00, 0x3C, 0x00, 0x40, 0x00, 0x44, 0x00, 0x26, 0x00, 0x00, 0x00, 0xC4, 0x23, 0x00, 0x00, + 0xF8, 0x21, 0x00, 0x00, 0x2C, 0x20, 0x00, 0x00, 0xF0, 0x1A, 0x00, 0x00, 0xB4, 0x15, 0x00, 0x00, 0x78, 0x10, + 0x00, 0x00, 0xF0, 0x0F, 0x00, 0x00, 0x68, 0x0F, 0x00, 0x00, 0xE0, 0x0E, 0x00, 0x00, 0x14, 0x0D, 0x00, 0x00, + 0xD8, 0x07, 0x00, 0x00, 0x50, 0x07, 0x00, 0x00, 0xC8, 0x06, 0x00, 0x00, 0x8C, 0x01, 0x00, 0x00, 0x14, 0x01, + 0x00, 0x00, 0x8C, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xEE, 0xD7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, + 0x64, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xFE, 0xD8, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x14, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x5A, 0xD8, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, + 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x72, 0xD8, + 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x64, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x82, 0xD9, 0xFF, 0xFF, + 0x04, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xDE, 0xD8, + 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x14, 0x00, 0x00, 0x00, 0xF6, 0xD8, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x54, 0x00, 0x00, 0x00, 0x04, 0x00, + 0x00, 0x00, 0x06, 0xDA, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x52, 0xD9, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 
0x6A, 0xD9, 0xFF, 0xFF, 0x00, 0x00, + 0x00, 0x03, 0x14, 0x05, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x7A, 0xDA, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, + 0x40, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x86, 0xDE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0xA2, 0xDE, + 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x64, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xB2, 0xDF, 0xFF, 0xFF, + 0x04, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0xDF, + 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x14, 0x00, 0x00, 0x00, 0x26, 0xDF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x64, 0x00, 0x00, 0x00, 0x04, 0x00, + 0x00, 0x00, 0x36, 0xE0, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x92, 0xDF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0xAA, 0xDF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, + 0x14, 0x05, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xBA, 0xE0, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x40, 0x01, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 
0x00, 0x00, 0x00, 0xC6, 0xE4, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0xE2, 0xE4, 0xFF, 0xFF, + 0x00, 0x00, 0x00, 0x03, 0xA4, 0x01, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xF2, 0xE5, 0xFF, 0xFF, 0x04, 0x00, + 0x00, 0x00, 0x64, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x8E, 0xE6, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, + 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x05, 0x00, + 0x00, 0x00, 0xAA, 0xE6, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x64, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, + 0xBA, 0xE7, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x16, 0xE7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x2E, 0xE7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x64, 0x00, + 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x3E, 0xE8, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x9A, 0xE7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0xB2, 0xE7, 0xFF, 0xFF, + 0x00, 0x00, 0x00, 0x03, 0x64, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xC2, 0xE8, 0xFF, 0xFF, 0x04, 0x00, + 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x1E, 0xE8, 0xFF, 0xFF, + 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x14, 0x00, + 0x00, 0x00, 0x36, 0xE8, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x14, 0x05, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, + 0x46, 0xE9, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x40, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x52, 0xED, 0xFF, 0xFF, + 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, + 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x6E, 0xED, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x14, 0x05, 0x00, 0x00, + 0x04, 0x00, 0x00, 0x00, 0x7E, 0xEE, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x40, 0x01, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x8A, 0xF2, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, + 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0xA6, 0xF2, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, + 0x14, 0x05, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xB6, 0xF3, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x40, 0x01, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0xC2, 0xF7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0xDE, 0xF7, 0xFF, 0xFF, + 0x00, 0x00, 0x00, 0x03, 0xA4, 0x01, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xEE, 0xF8, 0xFF, 0xFF, 0x04, 0x00, + 0x00, 0x00, 0x64, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x8A, 0xF9, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, + 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x05, 0x00, + 0x00, 0x00, 0xA6, 0xF9, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0xA4, 0x01, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, + 0xB6, 0xFA, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x64, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x52, 0xFB, + 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, + 0x14, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x6E, 0xFB, 0xFF, 0xFF, 0x00, 0x00, 0x00, 
0x03, 0xA4, 0x01, + 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x7E, 0xFC, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x64, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x1A, 0xFD, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x10, 0x00, 0x0C, 0x00, + 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x05, 0x00, 0x06, 0x00, 0x07, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x01, 0x01, 0x04, 0x00, 0x00, 0x00, 0x2E, 0xFE, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, + 0x22, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x6C, 0x73, + 0x74, 0x6D, 0x00, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0xEC, 0x00, 0x00, 0x00, 0xD0, 0x00, 0x00, 0x00, + 0xB4, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x88, 0x00, 0x00, 0x00, 0x5C, 0x00, 0x00, 0x00, 0x30, 0x00, + 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x14, 0xFF, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, + 0xA6, 0xFD, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, + 0x00, 0x00, 
0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x3C, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00, + 0x04, 0x00, 0x00, 0x00, 0xCE, 0xFD, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x64, 0xFF, 0xFF, 0xFF, + 0x01, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xF6, 0xFD, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, + 0xB4, 0xFE, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x1A, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x50, 0x00, 0x00, 0x00, + 0xF0, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, + 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xE8, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x09, 0x04, 0x00, 0x00, 0x00, + 0x7E, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, + 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x76, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00, + 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, + 0x68, 0xFF, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0xCE, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, + 0x08, 0x00, 0x0E, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x0C, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x08, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, + 0x08, 0x00, 0x0E, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x01, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x14, 0x00, + 0x0E, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, + 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, + 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x6E, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x08, 0x00, + 0x0C, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x04, 0x00, 0x00, 0x00, + 0xF6, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x0A, 0x00, 0x04, 0x00, 0x06, 0x00, + 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x14, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, + 0x0C, 0x00, 0x10, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, + 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, + 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x0A, 0x00, 0x10, 0x00, 0x08, 0x00, 0x07, 0x00, 0x0C, 0x00,
+        0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00,
+        0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x00
+    };
+
+    armnn::INetworkPtr deserializedNetwork =
+        DeserializeNetwork(std::string(lstmNoCifgWithPeepholeAndProjectionModel.begin(),
+                                       lstmNoCifgWithPeepholeAndProjectionModel.end()));
+
+    BOOST_CHECK(deserializedNetwork);
+
+    // generating the same model parameters which were used to serialize the model (Layer norm is not specified)
+    armnn::LstmDescriptor descriptor;
+    descriptor.m_ActivationFunc = 4;
+    descriptor.m_ClippingThresProj = 0.0f;
+    descriptor.m_ClippingThresCell = 0.0f;
+    descriptor.m_CifgEnabled = false;
+    descriptor.m_ProjectionEnabled = true;
+    descriptor.m_PeepholeEnabled = true;
+
+    const uint32_t batchSize = 2u;
+    const uint32_t inputSize = 5u;
+    const uint32_t numUnits = 20u;
+    const uint32_t outputSize = 16u;
+
+    armnn::TensorInfo tensorInfo20x5({numUnits, inputSize}, armnn::DataType::Float32);
+    std::vector<float> inputToInputWeightsData(tensorInfo20x5.GetNumElements(), 0.0f);
+    armnn::ConstTensor inputToInputWeights(tensorInfo20x5, inputToInputWeightsData);
+
+    std::vector<float> inputToForgetWeightsData(tensorInfo20x5.GetNumElements(), 0.0f);
+    armnn::ConstTensor inputToForgetWeights(tensorInfo20x5, inputToForgetWeightsData);
+
+    std::vector<float> inputToCellWeightsData(tensorInfo20x5.GetNumElements(), 0.0f);
+    armnn::ConstTensor inputToCellWeights(tensorInfo20x5, inputToCellWeightsData);
+
+    std::vector<float> inputToOutputWeightsData(tensorInfo20x5.GetNumElements(), 0.0f);
+    armnn::ConstTensor inputToOutputWeights(tensorInfo20x5, inputToOutputWeightsData);
+
+    armnn::TensorInfo tensorInfo20({numUnits}, armnn::DataType::Float32);
+    std::vector<float> inputGateBiasData(tensorInfo20.GetNumElements(), 0.0f);
+    armnn::ConstTensor inputGateBias(tensorInfo20, inputGateBiasData);
+
+    std::vector<float> forgetGateBiasData(tensorInfo20.GetNumElements(), 0.0f);
+    armnn::ConstTensor forgetGateBias(tensorInfo20, forgetGateBiasData);
+
+    std::vector<float> cellBiasData(tensorInfo20.GetNumElements(), 0.0f);
+    armnn::ConstTensor cellBias(tensorInfo20, cellBiasData);
+
+    std::vector<float> outputGateBiasData(tensorInfo20.GetNumElements(), 0.0f);
+    armnn::ConstTensor outputGateBias(tensorInfo20, outputGateBiasData);
+
+    armnn::TensorInfo tensorInfo20x16({numUnits, outputSize}, armnn::DataType::Float32);
+    std::vector<float> recurrentToInputWeightsData(tensorInfo20x16.GetNumElements(), 0.0f);
+    armnn::ConstTensor recurrentToInputWeights(tensorInfo20x16, recurrentToInputWeightsData);
+
+    std::vector<float> recurrentToForgetWeightsData(tensorInfo20x16.GetNumElements(), 0.0f);
+    armnn::ConstTensor recurrentToForgetWeights(tensorInfo20x16, recurrentToForgetWeightsData);
+
+    std::vector<float> recurrentToCellWeightsData(tensorInfo20x16.GetNumElements(), 0.0f);
+    armnn::ConstTensor recurrentToCellWeights(tensorInfo20x16, recurrentToCellWeightsData);
+
+    std::vector<float> recurrentToOutputWeightsData(tensorInfo20x16.GetNumElements(), 0.0f);
+    armnn::ConstTensor recurrentToOutputWeights(tensorInfo20x16, recurrentToOutputWeightsData);
+
+    std::vector<float> cellToInputWeightsData(tensorInfo20.GetNumElements(), 0.0f);
+    armnn::ConstTensor cellToInputWeights(tensorInfo20, cellToInputWeightsData);
+
+    std::vector<float> cellToForgetWeightsData(tensorInfo20.GetNumElements(), 0.0f);
+    armnn::ConstTensor cellToForgetWeights(tensorInfo20, cellToForgetWeightsData);
+
+    std::vector<float> cellToOutputWeightsData(tensorInfo20.GetNumElements(), 0.0f);
+    armnn::ConstTensor cellToOutputWeights(tensorInfo20, cellToOutputWeightsData);
+
+    armnn::TensorInfo tensorInfo16x20({outputSize, numUnits}, armnn::DataType::Float32);
+    std::vector<float> projectionWeightsData(tensorInfo16x20.GetNumElements(), 0.0f);
+    armnn::ConstTensor projectionWeights(tensorInfo16x20, projectionWeightsData);
+
+    armnn::TensorInfo tensorInfo16({outputSize}, armnn::DataType::Float32);
+    std::vector<float> projectionBiasData(outputSize, 0.0f);
+    armnn::ConstTensor projectionBias(tensorInfo16, projectionBiasData);
+
+    armnn::LstmInputParams params;
+    params.m_InputToForgetWeights = &inputToForgetWeights;
+    params.m_InputToCellWeights = &inputToCellWeights;
+    params.m_InputToOutputWeights = &inputToOutputWeights;
+    params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
+    params.m_RecurrentToCellWeights = &recurrentToCellWeights;
+    params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
+    params.m_ForgetGateBias = &forgetGateBias;
+    params.m_CellBias = &cellBias;
+    params.m_OutputGateBias = &outputGateBias;
+
+    // additional params because: descriptor.m_CifgEnabled = false
+    params.m_InputToInputWeights = &inputToInputWeights;
+    params.m_RecurrentToInputWeights = &recurrentToInputWeights;
+    params.m_CellToInputWeights = &cellToInputWeights;
+    params.m_InputGateBias = &inputGateBias;
+
+    // additional params because: descriptor.m_ProjectionEnabled = true
+    params.m_ProjectionWeights = &projectionWeights;
+    params.m_ProjectionBias = &projectionBias;
+
+    // additional params because: descriptor.m_PeepholeEnabled = true
+    params.m_CellToForgetWeights = &cellToForgetWeights;
+    params.m_CellToOutputWeights = &cellToOutputWeights;
+
+    const std::string layerName("lstm");
+    armnn::TensorInfo inputTensorInfo({ batchSize, inputSize }, armnn::DataType::Float32);
+    armnn::TensorInfo cellStateTensorInfo({ batchSize, numUnits}, armnn::DataType::Float32);
+    armnn::TensorInfo outputStateTensorInfo({ batchSize, outputSize }, armnn::DataType::Float32);
+    armnn::TensorInfo lstmTensorInfoScratchBuff({ batchSize, numUnits * 4 }, armnn::DataType::Float32);
+
+    VerifyLstmLayer<armnn::LstmDescriptor> checker(
+        layerName,
+        {inputTensorInfo, outputStateTensorInfo, cellStateTensorInfo},
+        {lstmTensorInfoScratchBuff, outputStateTensorInfo, cellStateTensorInfo, outputStateTensorInfo},
+        descriptor,
+        params);
+    deserializedNetwork->ExecuteStrategy(checker);
+}
+
+armnn::QuantizedLstmInputParams ConstantsVector2QuantizedLstmInputParams(
+    const std::vector<armnn::ConstTensor>& constants)
+{
+    armnn::QuantizedLstmInputParams params;
+
+    // index for constants vector
+    size_t i = 0;
+
+    // Get input parameters
+    params.m_InputToInputWeights = &constants[i++];
+    params.m_InputToForgetWeights = &constants[i++];
+    params.m_InputToCellWeights = &constants[i++];
+    params.m_InputToOutputWeights = &constants[i++];
+
+    params.m_RecurrentToInputWeights = &constants[i++];
+    params.m_RecurrentToForgetWeights = &constants[i++];
+    params.m_RecurrentToCellWeights = &constants[i++];
+    params.m_RecurrentToOutputWeights = &constants[i++];
+
+    params.m_InputGateBias = &constants[i++];
+    params.m_ForgetGateBias = &constants[i++];
+    params.m_CellBias = &constants[i++];
+    params.m_OutputGateBias = &constants[i++];
+
+    return params;
+}
+
+class VerifyQuantizedLstmLayer : public LayerVerifierBase
+{
+
+public:
+    VerifyQuantizedLstmLayer(const std::string& layerName,
+                             const std::vector<armnn::TensorInfo>& inputInfos,
+                             const std::vector<armnn::TensorInfo>& outputInfos,
+                             const armnn::QuantizedLstmInputParams& inputParams)
+        : LayerVerifierBase(layerName, inputInfos, outputInfos), m_InputParams(inputParams) {}
+
+    void ExecuteStrategy(const armnn::IConnectableLayer* layer,
+                         const armnn::BaseDescriptor& descriptor,
+                         const std::vector<armnn::ConstTensor>& constants,
+                         const char* name,
+                         const armnn::LayerBindingId id = 0) override
+    {
+        armnn::IgnoreUnused(descriptor, constants, id);
+        switch (layer->GetType())
+        {
+            case armnn::LayerType::Input: break;
+            case armnn::LayerType::Output: break;
+            case armnn::LayerType::QuantizedLstm:
+            {
+                VerifyNameAndConnections(layer, name);
+                armnn::QuantizedLstmInputParams params = ConstantsVector2QuantizedLstmInputParams(constants);
+                VerifyInputParameters(params);
+                break;
+            }
+            default:
+            {
+                throw armnn::Exception(fmt::format("Unexpected layer type in QuantizedLstm test model: {}",
+                                                   layer->GetName()));
+            }
+        }
+    }
+
+protected:
+    void VerifyInputParameters(const armnn::QuantizedLstmInputParams& params)
+    {
+        VerifyConstTensors("m_InputToInputWeights",
+                           m_InputParams.m_InputToInputWeights, params.m_InputToInputWeights);
+        VerifyConstTensors("m_InputToForgetWeights",
+                           m_InputParams.m_InputToForgetWeights, params.m_InputToForgetWeights);
+        VerifyConstTensors("m_InputToCellWeights",
+                           m_InputParams.m_InputToCellWeights, params.m_InputToCellWeights);
+        VerifyConstTensors("m_InputToOutputWeights",
+                           m_InputParams.m_InputToOutputWeights, params.m_InputToOutputWeights);
+        VerifyConstTensors("m_RecurrentToInputWeights",
+                           m_InputParams.m_RecurrentToInputWeights, params.m_RecurrentToInputWeights);
+        VerifyConstTensors("m_RecurrentToForgetWeights",
+                           m_InputParams.m_RecurrentToForgetWeights, params.m_RecurrentToForgetWeights);
+        VerifyConstTensors("m_RecurrentToCellWeights",
+                           m_InputParams.m_RecurrentToCellWeights, params.m_RecurrentToCellWeights);
+        VerifyConstTensors("m_RecurrentToOutputWeights",
+                           m_InputParams.m_RecurrentToOutputWeights, params.m_RecurrentToOutputWeights);
+        VerifyConstTensors("m_InputGateBias",
+                           m_InputParams.m_InputGateBias, params.m_InputGateBias);
+        VerifyConstTensors("m_ForgetGateBias",
+                           m_InputParams.m_ForgetGateBias, params.m_ForgetGateBias);
+        VerifyConstTensors("m_CellBias",
+                           m_InputParams.m_CellBias, params.m_CellBias);
+        VerifyConstTensors("m_OutputGateBias",
+                           m_InputParams.m_OutputGateBias, params.m_OutputGateBias);
+    }
+
+private:
+    armnn::QuantizedLstmInputParams m_InputParams;
+};
+
+BOOST_AUTO_TEST_CASE(SerializeDeserializeQuantizedLstm)
+{
+    const uint32_t batchSize = 1;
+    const uint32_t inputSize = 2;
+    const uint32_t numUnits = 4;
+    const uint32_t outputSize = numUnits;
+
+    // Scale/Offset for input/output, cellState In/Out, weights, bias
+    float inputOutputScale = 0.0078125f;
+    int32_t inputOutputOffset = 128;
+
+    float cellStateScale = 0.00048828125f;
+    int32_t cellStateOffset = 0;
+
+    float weightsScale = 0.00408021f;
+    int32_t weightsOffset = 100;
+
+    float biasScale = 3.1876640625e-05f;
+    int32_t biasOffset = 0;
+
+    // The shape of weight data is {outputSize, inputSize} = {4, 2}
+    armnn::TensorShape inputToInputWeightsShape = {4, 2};
+    std::vector<uint8_t> inputToInputWeightsData = {1, 2, 3, 4, 5, 6, 7, 8};
+    armnn::TensorInfo inputToInputWeightsInfo(inputToInputWeightsShape,
+                                              armnn::DataType::QAsymmU8,
+                                              weightsScale,
+                                              weightsOffset);
+    armnn::ConstTensor inputToInputWeights(inputToInputWeightsInfo, inputToInputWeightsData);
+
+    armnn::TensorShape inputToForgetWeightsShape = {4, 2};
+    std::vector<uint8_t> inputToForgetWeightsData = {1, 2, 3, 4, 5, 6, 7, 8};
+    armnn::TensorInfo inputToForgetWeightsInfo(inputToForgetWeightsShape,
+                                               armnn::DataType::QAsymmU8,
+                                               weightsScale,
+                                               weightsOffset);
+    armnn::ConstTensor inputToForgetWeights(inputToForgetWeightsInfo, inputToForgetWeightsData);
+
+    armnn::TensorShape inputToCellWeightsShape = {4, 2};
+    std::vector<uint8_t> inputToCellWeightsData = {1, 2, 3, 4, 5, 6, 7, 8};
+    armnn::TensorInfo inputToCellWeightsInfo(inputToCellWeightsShape,
+                                             armnn::DataType::QAsymmU8,
+                                             weightsScale,
+                                             weightsOffset);
+    armnn::ConstTensor inputToCellWeights(inputToCellWeightsInfo, inputToCellWeightsData);
+
+    armnn::TensorShape inputToOutputWeightsShape = {4, 2};
+    std::vector<uint8_t> inputToOutputWeightsData = {1, 2, 3, 4, 5, 6, 7, 8};
+    armnn::TensorInfo inputToOutputWeightsInfo(inputToOutputWeightsShape,
+                                               armnn::DataType::QAsymmU8,
+                                               weightsScale,
+                                               weightsOffset);
+    armnn::ConstTensor inputToOutputWeights(inputToOutputWeightsInfo, inputToOutputWeightsData);
+
+    // The shape of recurrent weight data is {outputSize, outputSize} = {4, 4}
+    armnn::TensorShape recurrentToInputWeightsShape = {4, 4};
+    std::vector<uint8_t> recurrentToInputWeightsData = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};
+    armnn::TensorInfo recurrentToInputWeightsInfo(recurrentToInputWeightsShape,
+                                                  armnn::DataType::QAsymmU8,
+                                                  weightsScale,
+                                                  weightsOffset);
+    armnn::ConstTensor recurrentToInputWeights(recurrentToInputWeightsInfo, recurrentToInputWeightsData);
+
+    armnn::TensorShape recurrentToForgetWeightsShape = {4, 4};
+    std::vector<uint8_t> recurrentToForgetWeightsData = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};
+    armnn::TensorInfo recurrentToForgetWeightsInfo(recurrentToForgetWeightsShape,
+                                                   armnn::DataType::QAsymmU8,
+                                                   weightsScale,
+                                                   weightsOffset);
+    armnn::ConstTensor recurrentToForgetWeights(recurrentToForgetWeightsInfo, recurrentToForgetWeightsData);
+
+    armnn::TensorShape recurrentToCellWeightsShape = {4, 4};
+    std::vector<uint8_t> recurrentToCellWeightsData = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};
+    armnn::TensorInfo recurrentToCellWeightsInfo(recurrentToCellWeightsShape,
+                                                 armnn::DataType::QAsymmU8,
+                                                 weightsScale,
+                                                 weightsOffset);
+    armnn::ConstTensor recurrentToCellWeights(recurrentToCellWeightsInfo, recurrentToCellWeightsData);
+
+    armnn::TensorShape recurrentToOutputWeightsShape = {4, 4};
+    std::vector<uint8_t> recurrentToOutputWeightsData = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};
+    armnn::TensorInfo recurrentToOutputWeightsInfo(recurrentToOutputWeightsShape,
+                                                   armnn::DataType::QAsymmU8,
+                                                   weightsScale,
+                                                   weightsOffset);
+    armnn::ConstTensor recurrentToOutputWeights(recurrentToOutputWeightsInfo, recurrentToOutputWeightsData);
+
+    // The shape of bias data is {outputSize} = {4}
+    armnn::TensorShape inputGateBiasShape = {4};
+    std::vector<int32_t> inputGateBiasData = {1, 2, 3, 4};
+    armnn::TensorInfo inputGateBiasInfo(inputGateBiasShape,
+                                        armnn::DataType::Signed32,
+                                        biasScale,
+                                        biasOffset);
+    armnn::ConstTensor inputGateBias(inputGateBiasInfo, inputGateBiasData);
+
+    armnn::TensorShape forgetGateBiasShape = {4};
+    std::vector<int32_t> forgetGateBiasData = {1, 2, 3, 4};
+    armnn::TensorInfo forgetGateBiasInfo(forgetGateBiasShape,
+                                         armnn::DataType::Signed32,
+                                         biasScale,
+                                         biasOffset);
+    armnn::ConstTensor forgetGateBias(forgetGateBiasInfo, forgetGateBiasData);
+
+    armnn::TensorShape cellBiasShape = {4};
+    std::vector<int32_t> cellBiasData = {1, 2, 3, 4};
+    armnn::TensorInfo cellBiasInfo(cellBiasShape,
+                                   armnn::DataType::Signed32,
+                                   biasScale,
+                                   biasOffset);
+    armnn::ConstTensor cellBias(cellBiasInfo, cellBiasData);
+
+    armnn::TensorShape outputGateBiasShape = {4};
+    std::vector<int32_t> outputGateBiasData = {1, 2, 3, 4};
+    armnn::TensorInfo outputGateBiasInfo(outputGateBiasShape,
+                                         armnn::DataType::Signed32,
+                                         biasScale,
+                                         biasOffset);
+    armnn::ConstTensor outputGateBias(outputGateBiasInfo, outputGateBiasData);
+
+    armnn::QuantizedLstmInputParams params;
+    params.m_InputToInputWeights = &inputToInputWeights;
+    params.m_InputToForgetWeights = &inputToForgetWeights;
+    params.m_InputToCellWeights = &inputToCellWeights;
+    params.m_InputToOutputWeights = &inputToOutputWeights;
+    params.m_RecurrentToInputWeights = &recurrentToInputWeights;
+    params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
+    params.m_RecurrentToCellWeights = &recurrentToCellWeights;
+    params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
+    params.m_InputGateBias = &inputGateBias;
+    params.m_ForgetGateBias = &forgetGateBias;
+    params.m_CellBias = &cellBias;
+    params.m_OutputGateBias = &outputGateBias;
+
+    armnn::INetworkPtr network = armnn::INetwork::Create();
+    armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
+    armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(1);
+    armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(2);
+    const std::string layerName("QuantizedLstm");
+    armnn::IConnectableLayer* const quantizedLstmLayer = network->AddQuantizedLstmLayer(params, layerName.c_str());
+    armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(0);
+    armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(1);
+
+    // Connect up
+    armnn::TensorInfo inputTensorInfo({ batchSize, inputSize },
+                                      armnn::DataType::QAsymmU8,
+                                      inputOutputScale,
+                                      inputOutputOffset);
+    armnn::TensorInfo cellStateTensorInfo({ batchSize, numUnits },
+                                          armnn::DataType::QSymmS16,
+                                          cellStateScale,
+                                          cellStateOffset);
+    armnn::TensorInfo outputStateTensorInfo({ batchSize, outputSize },
+                                            armnn::DataType::QAsymmU8,
+                                            inputOutputScale,
+                                            inputOutputOffset);
+
+    inputLayer->GetOutputSlot(0).Connect(quantizedLstmLayer->GetInputSlot(0));
+    inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
+
+    cellStateIn->GetOutputSlot(0).Connect(quantizedLstmLayer->GetInputSlot(1));
+    cellStateIn->GetOutputSlot(0).SetTensorInfo(cellStateTensorInfo);
+
+    outputStateIn->GetOutputSlot(0).Connect(quantizedLstmLayer->GetInputSlot(2));
+    outputStateIn->GetOutputSlot(0).SetTensorInfo(outputStateTensorInfo);
+
+    quantizedLstmLayer->GetOutputSlot(0).Connect(cellStateOut->GetInputSlot(0));
+    quantizedLstmLayer->GetOutputSlot(0).SetTensorInfo(cellStateTensorInfo);
+
+    quantizedLstmLayer->GetOutputSlot(1).Connect(outputLayer->GetInputSlot(0));
+    quantizedLstmLayer->GetOutputSlot(1).SetTensorInfo(outputStateTensorInfo);
+
+    armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
+    BOOST_CHECK(deserializedNetwork);
+
+    VerifyQuantizedLstmLayer checker(layerName,
+                                     {inputTensorInfo, cellStateTensorInfo, outputStateTensorInfo},
+                                     {cellStateTensorInfo, outputStateTensorInfo},
+                                     params);
+
+    deserializedNetwork->ExecuteStrategy(checker);
+}
+
+BOOST_AUTO_TEST_CASE(SerializeDeserializeQLstmBasic)
+{
+    armnn::QLstmDescriptor descriptor;
+
+    descriptor.m_CifgEnabled = true;
+    descriptor.m_ProjectionEnabled = false;
+    descriptor.m_PeepholeEnabled = false;
+    descriptor.m_LayerNormEnabled = false;
+
+    descriptor.m_CellClip = 0.0f;
+    descriptor.m_ProjectionClip = 0.0f;
+
+    descriptor.m_InputIntermediateScale = 0.00001f;
+    descriptor.m_ForgetIntermediateScale = 0.00001f;
+    descriptor.m_CellIntermediateScale = 0.00001f;
+    descriptor.m_OutputIntermediateScale = 0.00001f;
+
+    descriptor.m_HiddenStateScale = 0.07f;
descriptor.m_HiddenStateZeroPoint = 0; + + const unsigned int numBatches = 2; + const unsigned int inputSize = 5; + const unsigned int outputSize = 4; + const unsigned int numUnits = 4; + + // Scale/Offset quantization info + float inputScale = 0.0078f; + int32_t inputOffset = 0; + + float outputScale = 0.0078f; + int32_t outputOffset = 0; + + float cellStateScale = 3.5002e-05f; + int32_t cellStateOffset = 0; + + float weightsScale = 0.007f; + int32_t weightsOffset = 0; + + float biasScale = 3.5002e-05f / 1024; + int32_t biasOffset = 0; + + // Weights and bias tensor and quantization info + armnn::TensorInfo inputWeightsInfo({numUnits, inputSize}, + armnn::DataType::QSymmS8, + weightsScale, + weightsOffset); + + armnn::TensorInfo recurrentWeightsInfo({numUnits, outputSize}, + armnn::DataType::QSymmS8, + weightsScale, + weightsOffset); + + armnn::TensorInfo biasInfo({numUnits}, armnn::DataType::Signed32, biasScale, biasOffset); + + std::vector inputToForgetWeightsData = GenerateRandomData(inputWeightsInfo.GetNumElements()); + std::vector inputToCellWeightsData = GenerateRandomData(inputWeightsInfo.GetNumElements()); + std::vector inputToOutputWeightsData = GenerateRandomData(inputWeightsInfo.GetNumElements()); + + armnn::ConstTensor inputToForgetWeights(inputWeightsInfo, inputToForgetWeightsData); + armnn::ConstTensor inputToCellWeights(inputWeightsInfo, inputToCellWeightsData); + armnn::ConstTensor inputToOutputWeights(inputWeightsInfo, inputToOutputWeightsData); + + std::vector recurrentToForgetWeightsData = + GenerateRandomData(recurrentWeightsInfo.GetNumElements()); + std::vector recurrentToCellWeightsData = + GenerateRandomData(recurrentWeightsInfo.GetNumElements()); + std::vector recurrentToOutputWeightsData = + GenerateRandomData(recurrentWeightsInfo.GetNumElements()); + + armnn::ConstTensor recurrentToForgetWeights(recurrentWeightsInfo, recurrentToForgetWeightsData); + armnn::ConstTensor recurrentToCellWeights(recurrentWeightsInfo, recurrentToCellWeightsData); + armnn::ConstTensor recurrentToOutputWeights(recurrentWeightsInfo, recurrentToOutputWeightsData); + + std::vector forgetGateBiasData(numUnits, 1); + std::vector cellBiasData(numUnits, 0); + std::vector outputGateBiasData(numUnits, 0); + + armnn::ConstTensor forgetGateBias(biasInfo, forgetGateBiasData); + armnn::ConstTensor cellBias(biasInfo, cellBiasData); + armnn::ConstTensor outputGateBias(biasInfo, outputGateBiasData); + + // Set up params + armnn::LstmInputParams params; + params.m_InputToForgetWeights = &inputToForgetWeights; + params.m_InputToCellWeights = &inputToCellWeights; + params.m_InputToOutputWeights = &inputToOutputWeights; + + params.m_RecurrentToForgetWeights = &recurrentToForgetWeights; + params.m_RecurrentToCellWeights = &recurrentToCellWeights; + params.m_RecurrentToOutputWeights = &recurrentToOutputWeights; + + params.m_ForgetGateBias = &forgetGateBias; + params.m_CellBias = &cellBias; + params.m_OutputGateBias = &outputGateBias; + + // Create network + armnn::INetworkPtr network = armnn::INetwork::Create(); + const std::string layerName("qLstm"); + + armnn::IConnectableLayer* const input = network->AddInputLayer(0); + armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(1); + armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(2); + + armnn::IConnectableLayer* const qLstmLayer = network->AddQLstmLayer(descriptor, params, layerName.c_str()); + + armnn::IConnectableLayer* const outputStateOut = network->AddOutputLayer(0); + armnn::IConnectableLayer* const cellStateOut = 
network->AddOutputLayer(1); + armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(2); + + // Input/Output tensor info + armnn::TensorInfo inputInfo({numBatches , inputSize}, + armnn::DataType::QAsymmS8, + inputScale, + inputOffset); + + armnn::TensorInfo cellStateInfo({numBatches , numUnits}, + armnn::DataType::QSymmS16, + cellStateScale, + cellStateOffset); + + armnn::TensorInfo outputStateInfo({numBatches , outputSize}, + armnn::DataType::QAsymmS8, + outputScale, + outputOffset); + + // Connect input/output slots + input->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(0)); + input->GetOutputSlot(0).SetTensorInfo(inputInfo); + + outputStateIn->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(1)); + outputStateIn->GetOutputSlot(0).SetTensorInfo(cellStateInfo); + + cellStateIn->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(2)); + cellStateIn->GetOutputSlot(0).SetTensorInfo(outputStateInfo); + + qLstmLayer->GetOutputSlot(0).Connect(outputStateOut->GetInputSlot(0)); + qLstmLayer->GetOutputSlot(0).SetTensorInfo(outputStateInfo); + + qLstmLayer->GetOutputSlot(1).Connect(cellStateOut->GetInputSlot(0)); + qLstmLayer->GetOutputSlot(1).SetTensorInfo(cellStateInfo); + + qLstmLayer->GetOutputSlot(2).Connect(outputLayer->GetInputSlot(0)); + qLstmLayer->GetOutputSlot(2).SetTensorInfo(outputStateInfo); + + armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); + BOOST_CHECK(deserializedNetwork); + + VerifyLstmLayer checker( + layerName, + {inputInfo, cellStateInfo, outputStateInfo}, + {outputStateInfo, cellStateInfo, outputStateInfo}, + descriptor, + params); + + deserializedNetwork->ExecuteStrategy(checker); +} + +BOOST_AUTO_TEST_CASE(SerializeDeserializeQLstmCifgLayerNorm) +{ + armnn::QLstmDescriptor descriptor; + + // CIFG params are used when CIFG is disabled + descriptor.m_CifgEnabled = true; + descriptor.m_ProjectionEnabled = false; + descriptor.m_PeepholeEnabled = false; + descriptor.m_LayerNormEnabled = true; + + descriptor.m_CellClip = 0.0f; + descriptor.m_ProjectionClip = 0.0f; + + descriptor.m_InputIntermediateScale = 0.00001f; + descriptor.m_ForgetIntermediateScale = 0.00001f; + descriptor.m_CellIntermediateScale = 0.00001f; + descriptor.m_OutputIntermediateScale = 0.00001f; + + descriptor.m_HiddenStateScale = 0.07f; + descriptor.m_HiddenStateZeroPoint = 0; + + const unsigned int numBatches = 2; + const unsigned int inputSize = 5; + const unsigned int outputSize = 4; + const unsigned int numUnits = 4; + + // Scale/Offset quantization info + float inputScale = 0.0078f; + int32_t inputOffset = 0; + + float outputScale = 0.0078f; + int32_t outputOffset = 0; + + float cellStateScale = 3.5002e-05f; + int32_t cellStateOffset = 0; + + float weightsScale = 0.007f; + int32_t weightsOffset = 0; + + float layerNormScale = 3.5002e-05f; + int32_t layerNormOffset = 0; + + float biasScale = layerNormScale / 1024; + int32_t biasOffset = 0; + + // Weights and bias tensor and quantization info + armnn::TensorInfo inputWeightsInfo({numUnits, inputSize}, + armnn::DataType::QSymmS8, + weightsScale, + weightsOffset); + + armnn::TensorInfo recurrentWeightsInfo({numUnits, outputSize}, + armnn::DataType::QSymmS8, + weightsScale, + weightsOffset); + + armnn::TensorInfo biasInfo({numUnits}, + armnn::DataType::Signed32, + biasScale, + biasOffset); + + armnn::TensorInfo layerNormWeightsInfo({numUnits}, + armnn::DataType::QSymmS16, + layerNormScale, + layerNormOffset); + + // Mandatory params + std::vector inputToForgetWeightsData = 
GenerateRandomData(inputWeightsInfo.GetNumElements()); + std::vector inputToCellWeightsData = GenerateRandomData(inputWeightsInfo.GetNumElements()); + std::vector inputToOutputWeightsData = GenerateRandomData(inputWeightsInfo.GetNumElements()); + + armnn::ConstTensor inputToForgetWeights(inputWeightsInfo, inputToForgetWeightsData); + armnn::ConstTensor inputToCellWeights(inputWeightsInfo, inputToCellWeightsData); + armnn::ConstTensor inputToOutputWeights(inputWeightsInfo, inputToOutputWeightsData); + + std::vector recurrentToForgetWeightsData = + GenerateRandomData(recurrentWeightsInfo.GetNumElements()); + std::vector recurrentToCellWeightsData = + GenerateRandomData(recurrentWeightsInfo.GetNumElements()); + std::vector recurrentToOutputWeightsData = + GenerateRandomData(recurrentWeightsInfo.GetNumElements()); + + armnn::ConstTensor recurrentToForgetWeights(recurrentWeightsInfo, recurrentToForgetWeightsData); + armnn::ConstTensor recurrentToCellWeights(recurrentWeightsInfo, recurrentToCellWeightsData); + armnn::ConstTensor recurrentToOutputWeights(recurrentWeightsInfo, recurrentToOutputWeightsData); + + std::vector forgetGateBiasData(numUnits, 1); + std::vector cellBiasData(numUnits, 0); + std::vector outputGateBiasData(numUnits, 0); + + armnn::ConstTensor forgetGateBias(biasInfo, forgetGateBiasData); + armnn::ConstTensor cellBias(biasInfo, cellBiasData); + armnn::ConstTensor outputGateBias(biasInfo, outputGateBiasData); + + // Layer Norm + std::vector forgetLayerNormWeightsData = + GenerateRandomData(layerNormWeightsInfo.GetNumElements()); + std::vector cellLayerNormWeightsData = + GenerateRandomData(layerNormWeightsInfo.GetNumElements()); + std::vector outputLayerNormWeightsData = + GenerateRandomData(layerNormWeightsInfo.GetNumElements()); + + armnn::ConstTensor forgetLayerNormWeights(layerNormWeightsInfo, forgetLayerNormWeightsData); + armnn::ConstTensor cellLayerNormWeights(layerNormWeightsInfo, cellLayerNormWeightsData); + armnn::ConstTensor outputLayerNormWeights(layerNormWeightsInfo, outputLayerNormWeightsData); + + // Set up params + armnn::LstmInputParams params; + + // Mandatory params + params.m_InputToForgetWeights = &inputToForgetWeights; + params.m_InputToCellWeights = &inputToCellWeights; + params.m_InputToOutputWeights = &inputToOutputWeights; + + params.m_RecurrentToForgetWeights = &recurrentToForgetWeights; + params.m_RecurrentToCellWeights = &recurrentToCellWeights; + params.m_RecurrentToOutputWeights = &recurrentToOutputWeights; + + params.m_ForgetGateBias = &forgetGateBias; + params.m_CellBias = &cellBias; + params.m_OutputGateBias = &outputGateBias; + + // Layer Norm + params.m_ForgetLayerNormWeights = &forgetLayerNormWeights; + params.m_CellLayerNormWeights = &cellLayerNormWeights; + params.m_OutputLayerNormWeights = &outputLayerNormWeights; + + // Create network + armnn::INetworkPtr network = armnn::INetwork::Create(); + const std::string layerName("qLstm"); + + armnn::IConnectableLayer* const input = network->AddInputLayer(0); + armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(1); + armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(2); + + armnn::IConnectableLayer* const qLstmLayer = network->AddQLstmLayer(descriptor, params, layerName.c_str()); + + armnn::IConnectableLayer* const outputStateOut = network->AddOutputLayer(0); + armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(1); + armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(2); + + // Input/Output tensor info + 
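// Note: the input/output state tensors below are QAsymmS8 and the cell state is QSymmS16 +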
armnn::TensorInfo inputInfo({numBatches , inputSize}, + armnn::DataType::QAsymmS8, + inputScale, + inputOffset); + + armnn::TensorInfo cellStateInfo({numBatches , numUnits}, + armnn::DataType::QSymmS16, + cellStateScale, + cellStateOffset); + + armnn::TensorInfo outputStateInfo({numBatches , outputSize}, + armnn::DataType::QAsymmS8, + outputScale, + outputOffset); + + // Connect input/output slots + input->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(0)); + input->GetOutputSlot(0).SetTensorInfo(inputInfo); + + outputStateIn->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(1)); + outputStateIn->GetOutputSlot(0).SetTensorInfo(cellStateInfo); + + cellStateIn->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(2)); + cellStateIn->GetOutputSlot(0).SetTensorInfo(outputStateInfo); + + qLstmLayer->GetOutputSlot(0).Connect(outputStateOut->GetInputSlot(0)); + qLstmLayer->GetOutputSlot(0).SetTensorInfo(outputStateInfo); + + qLstmLayer->GetOutputSlot(1).Connect(cellStateOut->GetInputSlot(0)); + qLstmLayer->GetOutputSlot(1).SetTensorInfo(cellStateInfo); + + qLstmLayer->GetOutputSlot(2).Connect(outputLayer->GetInputSlot(0)); + qLstmLayer->GetOutputSlot(2).SetTensorInfo(outputStateInfo); + + armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); + BOOST_CHECK(deserializedNetwork); + + VerifyLstmLayer checker(layerName, + {inputInfo, cellStateInfo, outputStateInfo}, + {outputStateInfo, cellStateInfo, outputStateInfo}, + descriptor, + params); + + deserializedNetwork->ExecuteStrategy(checker); +} + +BOOST_AUTO_TEST_CASE(SerializeDeserializeQLstmAdvanced) +{ + armnn::QLstmDescriptor descriptor; + + descriptor.m_CifgEnabled = false; + descriptor.m_ProjectionEnabled = true; + descriptor.m_PeepholeEnabled = true; + descriptor.m_LayerNormEnabled = true; + + descriptor.m_CellClip = 0.1f; + descriptor.m_ProjectionClip = 0.1f; + + descriptor.m_InputIntermediateScale = 0.00001f; + descriptor.m_ForgetIntermediateScale = 0.00001f; + descriptor.m_CellIntermediateScale = 0.00001f; + descriptor.m_OutputIntermediateScale = 0.00001f; + + descriptor.m_HiddenStateScale = 0.07f; + descriptor.m_HiddenStateZeroPoint = 0; + + const unsigned int numBatches = 2; + const unsigned int inputSize = 5; + const unsigned int outputSize = 4; + const unsigned int numUnits = 4; + + // Scale/Offset quantization info + float inputScale = 0.0078f; + int32_t inputOffset = 0; + + float outputScale = 0.0078f; + int32_t outputOffset = 0; + + float cellStateScale = 3.5002e-05f; + int32_t cellStateOffset = 0; + + float weightsScale = 0.007f; + int32_t weightsOffset = 0; + + float layerNormScale = 3.5002e-05f; + int32_t layerNormOffset = 0; + + float biasScale = layerNormScale / 1024; + int32_t biasOffset = 0; + + // Weights and bias tensor and quantization info + armnn::TensorInfo inputWeightsInfo({numUnits, inputSize}, + armnn::DataType::QSymmS8, + weightsScale, + weightsOffset); + + armnn::TensorInfo recurrentWeightsInfo({numUnits, outputSize}, + armnn::DataType::QSymmS8, + weightsScale, + weightsOffset); + + armnn::TensorInfo biasInfo({numUnits}, + armnn::DataType::Signed32, + biasScale, + biasOffset); + + armnn::TensorInfo peepholeWeightsInfo({numUnits}, + armnn::DataType::QSymmS16, + weightsScale, + weightsOffset); + + armnn::TensorInfo layerNormWeightsInfo({numUnits}, + armnn::DataType::QSymmS16, + layerNormScale, + layerNormOffset); + + armnn::TensorInfo projectionWeightsInfo({outputSize, numUnits}, + armnn::DataType::QSymmS8, + weightsScale, + weightsOffset); + + // Mandatory params + std::vector 
inputToForgetWeightsData = GenerateRandomData(inputWeightsInfo.GetNumElements()); + std::vector inputToCellWeightsData = GenerateRandomData(inputWeightsInfo.GetNumElements()); + std::vector inputToOutputWeightsData = GenerateRandomData(inputWeightsInfo.GetNumElements()); + + armnn::ConstTensor inputToForgetWeights(inputWeightsInfo, inputToForgetWeightsData); + armnn::ConstTensor inputToCellWeights(inputWeightsInfo, inputToCellWeightsData); + armnn::ConstTensor inputToOutputWeights(inputWeightsInfo, inputToOutputWeightsData); + + std::vector recurrentToForgetWeightsData = + GenerateRandomData(recurrentWeightsInfo.GetNumElements()); + std::vector recurrentToCellWeightsData = + GenerateRandomData(recurrentWeightsInfo.GetNumElements()); + std::vector recurrentToOutputWeightsData = + GenerateRandomData(recurrentWeightsInfo.GetNumElements()); + + armnn::ConstTensor recurrentToForgetWeights(recurrentWeightsInfo, recurrentToForgetWeightsData); + armnn::ConstTensor recurrentToCellWeights(recurrentWeightsInfo, recurrentToCellWeightsData); + armnn::ConstTensor recurrentToOutputWeights(recurrentWeightsInfo, recurrentToOutputWeightsData); + + std::vector forgetGateBiasData(numUnits, 1); + std::vector cellBiasData(numUnits, 0); + std::vector outputGateBiasData(numUnits, 0); + + armnn::ConstTensor forgetGateBias(biasInfo, forgetGateBiasData); + armnn::ConstTensor cellBias(biasInfo, cellBiasData); + armnn::ConstTensor outputGateBias(biasInfo, outputGateBiasData); + + // CIFG + std::vector inputToInputWeightsData = GenerateRandomData(inputWeightsInfo.GetNumElements()); + std::vector recurrentToInputWeightsData = + GenerateRandomData(recurrentWeightsInfo.GetNumElements()); + std::vector inputGateBiasData(numUnits, 1); + + armnn::ConstTensor inputToInputWeights(inputWeightsInfo, inputToInputWeightsData); + armnn::ConstTensor recurrentToInputWeights(recurrentWeightsInfo, recurrentToInputWeightsData); + armnn::ConstTensor inputGateBias(biasInfo, inputGateBiasData); + + // Peephole + std::vector cellToInputWeightsData = GenerateRandomData(peepholeWeightsInfo.GetNumElements()); + std::vector cellToForgetWeightsData = GenerateRandomData(peepholeWeightsInfo.GetNumElements()); + std::vector cellToOutputWeightsData = GenerateRandomData(peepholeWeightsInfo.GetNumElements()); + + armnn::ConstTensor cellToInputWeights(peepholeWeightsInfo, cellToInputWeightsData); + armnn::ConstTensor cellToForgetWeights(peepholeWeightsInfo, cellToForgetWeightsData); + armnn::ConstTensor cellToOutputWeights(peepholeWeightsInfo, cellToOutputWeightsData); + + // Projection + std::vector projectionWeightsData = GenerateRandomData(projectionWeightsInfo.GetNumElements()); + std::vector projectionBiasData(outputSize, 1); + + armnn::ConstTensor projectionWeights(projectionWeightsInfo, projectionWeightsData); + armnn::ConstTensor projectionBias(biasInfo, projectionBiasData); + + // Layer Norm + std::vector inputLayerNormWeightsData = + GenerateRandomData(layerNormWeightsInfo.GetNumElements()); + std::vector forgetLayerNormWeightsData = + GenerateRandomData(layerNormWeightsInfo.GetNumElements()); + std::vector cellLayerNormWeightsData = + GenerateRandomData(layerNormWeightsInfo.GetNumElements()); + std::vector outputLayerNormWeightsData = + GenerateRandomData(layerNormWeightsInfo.GetNumElements()); + + armnn::ConstTensor inputLayerNormWeights(layerNormWeightsInfo, inputLayerNormWeightsData); + armnn::ConstTensor forgetLayerNormWeights(layerNormWeightsInfo, forgetLayerNormWeightsData); + armnn::ConstTensor 
cellLayerNormWeights(layerNormWeightsInfo, cellLayerNormWeightsData); + armnn::ConstTensor outputLayerNormWeights(layerNormWeightsInfo, outputLayerNormWeightsData); + + // Set up params + armnn::LstmInputParams params; + + // Mandatory params + params.m_InputToForgetWeights = &inputToForgetWeights; + params.m_InputToCellWeights = &inputToCellWeights; + params.m_InputToOutputWeights = &inputToOutputWeights; + + params.m_RecurrentToForgetWeights = &recurrentToForgetWeights; + params.m_RecurrentToCellWeights = &recurrentToCellWeights; + params.m_RecurrentToOutputWeights = &recurrentToOutputWeights; + + params.m_ForgetGateBias = &forgetGateBias; + params.m_CellBias = &cellBias; + params.m_OutputGateBias = &outputGateBias; + + // CIFG + params.m_InputToInputWeights = &inputToInputWeights; + params.m_RecurrentToInputWeights = &recurrentToInputWeights; + params.m_InputGateBias = &inputGateBias; + + // Peephole + params.m_CellToInputWeights = &cellToInputWeights; + params.m_CellToForgetWeights = &cellToForgetWeights; + params.m_CellToOutputWeights = &cellToOutputWeights; + + // Projection + params.m_ProjectionWeights = &projectionWeights; + params.m_ProjectionBias = &projectionBias; + + // Layer Norm + params.m_InputLayerNormWeights = &inputLayerNormWeights; + params.m_ForgetLayerNormWeights = &forgetLayerNormWeights; + params.m_CellLayerNormWeights = &cellLayerNormWeights; + params.m_OutputLayerNormWeights = &outputLayerNormWeights; + + // Create network + armnn::INetworkPtr network = armnn::INetwork::Create(); + const std::string layerName("qLstm"); + + armnn::IConnectableLayer* const input = network->AddInputLayer(0); + armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(1); + armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(2); + + armnn::IConnectableLayer* const qLstmLayer = network->AddQLstmLayer(descriptor, params, layerName.c_str()); + + armnn::IConnectableLayer* const outputStateOut = network->AddOutputLayer(0); + armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(1); + armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(2); + + // Input/Output tensor info + armnn::TensorInfo inputInfo({numBatches , inputSize}, + armnn::DataType::QAsymmS8, + inputScale, + inputOffset); + + armnn::TensorInfo cellStateInfo({numBatches , numUnits}, + armnn::DataType::QSymmS16, + cellStateScale, + cellStateOffset); + + armnn::TensorInfo outputStateInfo({numBatches , outputSize}, + armnn::DataType::QAsymmS8, + outputScale, + outputOffset); + + // Connect input/output slots + input->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(0)); + input->GetOutputSlot(0).SetTensorInfo(inputInfo); + + outputStateIn->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(1)); + outputStateIn->GetOutputSlot(0).SetTensorInfo(cellStateInfo); + + cellStateIn->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(2)); + cellStateIn->GetOutputSlot(0).SetTensorInfo(outputStateInfo); + + qLstmLayer->GetOutputSlot(0).Connect(outputStateOut->GetInputSlot(0)); + qLstmLayer->GetOutputSlot(0).SetTensorInfo(outputStateInfo); + + qLstmLayer->GetOutputSlot(1).Connect(cellStateOut->GetInputSlot(0)); + qLstmLayer->GetOutputSlot(1).SetTensorInfo(cellStateInfo); + + qLstmLayer->GetOutputSlot(2).Connect(outputLayer->GetInputSlot(0)); + qLstmLayer->GetOutputSlot(2).SetTensorInfo(outputStateInfo); + + armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); + BOOST_CHECK(deserializedNetwork); + + VerifyLstmLayer checker(layerName, + {inputInfo, 
cellStateInfo, outputStateInfo}, + {outputStateInfo, cellStateInfo, outputStateInfo}, + descriptor, + params); + + deserializedNetwork->ExecuteStrategy(checker); +} + +BOOST_AUTO_TEST_SUITE_END() diff --git a/src/armnnSerializer/test/SerializerTestUtils.cpp b/src/armnnSerializer/test/SerializerTestUtils.cpp new file mode 100644 index 0000000000..586d2a05a5 --- /dev/null +++ b/src/armnnSerializer/test/SerializerTestUtils.cpp @@ -0,0 +1,163 @@ +// +// Copyright © 2021 Arm Ltd and Contributors. All rights reserved. +// SPDX-License-Identifier: MIT +// + +#include "SerializerTestUtils.hpp" +#include "../Serializer.hpp" + +using armnnDeserializer::IDeserializer; + +LayerVerifierBase::LayerVerifierBase(const std::string& layerName, + const std::vector& inputInfos, + const std::vector& outputInfos) + : m_LayerName(layerName) + , m_InputTensorInfos(inputInfos) + , m_OutputTensorInfos(outputInfos) +{} + +void LayerVerifierBase::ExecuteStrategy(const armnn::IConnectableLayer* layer, + const armnn::BaseDescriptor& descriptor, + const std::vector& constants, + const char* name, + const armnn::LayerBindingId id) +{ + armnn::IgnoreUnused(descriptor, constants, id); + switch (layer->GetType()) + { + case armnn::LayerType::Input: break; + case armnn::LayerType::Output: break; + default: + { + VerifyNameAndConnections(layer, name); + } + } +} + + +void LayerVerifierBase::VerifyNameAndConnections(const armnn::IConnectableLayer* layer, const char* name) +{ + BOOST_TEST(name == m_LayerName.c_str()); + + BOOST_TEST(layer->GetNumInputSlots() == m_InputTensorInfos.size()); + BOOST_TEST(layer->GetNumOutputSlots() == m_OutputTensorInfos.size()); + + for (unsigned int i = 0; i < m_InputTensorInfos.size(); i++) + { + const armnn::IOutputSlot* connectedOutput = layer->GetInputSlot(i).GetConnection(); + BOOST_CHECK(connectedOutput); + + const armnn::TensorInfo& connectedInfo = connectedOutput->GetTensorInfo(); + BOOST_TEST(connectedInfo.GetShape() == m_InputTensorInfos[i].GetShape()); + BOOST_TEST( + GetDataTypeName(connectedInfo.GetDataType()) == GetDataTypeName(m_InputTensorInfos[i].GetDataType())); + + BOOST_TEST(connectedInfo.GetQuantizationScale() == m_InputTensorInfos[i].GetQuantizationScale()); + BOOST_TEST(connectedInfo.GetQuantizationOffset() == m_InputTensorInfos[i].GetQuantizationOffset()); + } + + for (unsigned int i = 0; i < m_OutputTensorInfos.size(); i++) + { + const armnn::TensorInfo& outputInfo = layer->GetOutputSlot(i).GetTensorInfo(); + BOOST_TEST(outputInfo.GetShape() == m_OutputTensorInfos[i].GetShape()); + BOOST_TEST( + GetDataTypeName(outputInfo.GetDataType()) == GetDataTypeName(m_OutputTensorInfos[i].GetDataType())); + + BOOST_TEST(outputInfo.GetQuantizationScale() == m_OutputTensorInfos[i].GetQuantizationScale()); + BOOST_TEST(outputInfo.GetQuantizationOffset() == m_OutputTensorInfos[i].GetQuantizationOffset()); + } +} + +void LayerVerifierBase::VerifyConstTensors(const std::string& tensorName, + const armnn::ConstTensor* expectedPtr, + const armnn::ConstTensor* actualPtr) +{ + if (expectedPtr == nullptr) + { + BOOST_CHECK_MESSAGE(actualPtr == nullptr, tensorName + " should not exist"); + } + else + { + BOOST_CHECK_MESSAGE(actualPtr != nullptr, tensorName + " should have been set"); + if (actualPtr != nullptr) + { + const armnn::TensorInfo& expectedInfo = expectedPtr->GetInfo(); + const armnn::TensorInfo& actualInfo = actualPtr->GetInfo(); + + BOOST_CHECK_MESSAGE(expectedInfo.GetShape() == actualInfo.GetShape(), + tensorName + " shapes don't match"); + BOOST_CHECK_MESSAGE( + 
+                GetDataTypeName(expectedInfo.GetDataType()) == GetDataTypeName(actualInfo.GetDataType()),
+                tensorName + " data types don't match");
+
+            BOOST_CHECK_MESSAGE(expectedPtr->GetNumBytes() == actualPtr->GetNumBytes(),
+                                tensorName + " (GetNumBytes) data sizes do not match");
+            if (expectedPtr->GetNumBytes() == actualPtr->GetNumBytes())
+            {
+                //check the data is identical
+                const char* expectedData = static_cast<const char*>(expectedPtr->GetMemoryArea());
+                const char* actualData = static_cast<const char*>(actualPtr->GetMemoryArea());
+                bool same = true;
+                for (unsigned int i = 0; i < expectedPtr->GetNumBytes(); ++i)
+                {
+                    same = expectedData[i] == actualData[i];
+                    if (!same)
+                    {
+                        break;
+                    }
+                }
+                BOOST_CHECK_MESSAGE(same, tensorName + " data does not match");
+            }
+        }
+    }
+}
+
+void CompareConstTensor(const armnn::ConstTensor& tensor1, const armnn::ConstTensor& tensor2)
+{
+    BOOST_TEST(tensor1.GetShape() == tensor2.GetShape());
+    BOOST_TEST(GetDataTypeName(tensor1.GetDataType()) == GetDataTypeName(tensor2.GetDataType()));
+
+    switch (tensor1.GetDataType())
+    {
+        case armnn::DataType::Float32:
+            CompareConstTensorData<const float*>(
+                tensor1.GetMemoryArea(), tensor2.GetMemoryArea(), tensor1.GetNumElements());
+            break;
+        case armnn::DataType::QAsymmU8:
+        case armnn::DataType::Boolean:
+            CompareConstTensorData<const uint8_t*>(
+                tensor1.GetMemoryArea(), tensor2.GetMemoryArea(), tensor1.GetNumElements());
+            break;
+        case armnn::DataType::QSymmS8:
+            CompareConstTensorData<const int8_t*>(
+                tensor1.GetMemoryArea(), tensor2.GetMemoryArea(), tensor1.GetNumElements());
+            break;
+        case armnn::DataType::Signed32:
+            CompareConstTensorData<const int32_t*>(
+                tensor1.GetMemoryArea(), tensor2.GetMemoryArea(), tensor1.GetNumElements());
+            break;
+        default:
+            // Note that Float16 is not yet implemented
+            BOOST_TEST_MESSAGE("Unexpected datatype");
+            BOOST_TEST(false);
+    }
+}
+
+armnn::INetworkPtr DeserializeNetwork(const std::string& serializerString)
+{
+    std::vector<std::uint8_t> const serializerVector{serializerString.begin(), serializerString.end()};
+    return IDeserializer::Create()->CreateNetworkFromBinary(serializerVector);
+}
+
+std::string SerializeNetwork(const armnn::INetwork& network)
+{
+    armnnSerializer::ISerializerPtr serializer = armnnSerializer::ISerializer::Create();
+
+    serializer->Serialize(network);
+
+    std::stringstream stream;
+    serializer->SaveSerializedToStream(stream);
+
+    std::string serializerString{stream.str()};
+    return serializerString;
+}
diff --git a/src/armnnSerializer/test/SerializerTestUtils.hpp b/src/armnnSerializer/test/SerializerTestUtils.hpp
new file mode 100644
index 0000000000..e085d2ef15
--- /dev/null
+++ b/src/armnnSerializer/test/SerializerTestUtils.hpp
@@ -0,0 +1,167 @@
+//
+// Copyright © 2021 Arm Ltd and Contributors. All rights reserved.
+// SPDX-License-Identifier: MIT +// + +#include +#include +#include +#include +#include + +#include +#include + +#include + + +armnn::INetworkPtr DeserializeNetwork(const std::string& serializerString); + +std::string SerializeNetwork(const armnn::INetwork& network); + +void CompareConstTensor(const armnn::ConstTensor& tensor1, const armnn::ConstTensor& tensor2); + +class LayerVerifierBase : public armnn::IStrategy +{ +public: + LayerVerifierBase(const std::string& layerName, + const std::vector& inputInfos, + const std::vector& outputInfos); + + void ExecuteStrategy(const armnn::IConnectableLayer* layer, + const armnn::BaseDescriptor& descriptor, + const std::vector& constants, + const char* name, + const armnn::LayerBindingId id = 0) override; + +protected: + void VerifyNameAndConnections(const armnn::IConnectableLayer* layer, const char* name); + + void VerifyConstTensors(const std::string& tensorName, + const armnn::ConstTensor* expectedPtr, + const armnn::ConstTensor* actualPtr); + +private: + std::string m_LayerName; + std::vector m_InputTensorInfos; + std::vector m_OutputTensorInfos; +}; + +template +class LayerVerifierBaseWithDescriptor : public LayerVerifierBase +{ +public: + LayerVerifierBaseWithDescriptor(const std::string& layerName, + const std::vector& inputInfos, + const std::vector& outputInfos, + const Descriptor& descriptor) + : LayerVerifierBase(layerName, inputInfos, outputInfos) + , m_Descriptor(descriptor) {} + + void ExecuteStrategy(const armnn::IConnectableLayer* layer, + const armnn::BaseDescriptor& descriptor, + const std::vector& constants, + const char* name, + const armnn::LayerBindingId id = 0) override + { + armnn::IgnoreUnused(constants, id); + switch (layer->GetType()) + { + case armnn::LayerType::Input: break; + case armnn::LayerType::Output: break; + default: + { + VerifyNameAndConnections(layer, name); + const Descriptor& internalDescriptor = static_cast(descriptor); + VerifyDescriptor(internalDescriptor); + break; + } + } + } + +protected: + void VerifyDescriptor(const Descriptor& descriptor) + { + BOOST_CHECK(descriptor == m_Descriptor); + } + + Descriptor m_Descriptor; +}; + +template +void CompareConstTensorData(const void* data1, const void* data2, unsigned int numElements) +{ + T typedData1 = static_cast(data1); + T typedData2 = static_cast(data2); + BOOST_CHECK(typedData1); + BOOST_CHECK(typedData2); + + for (unsigned int i = 0; i < numElements; i++) + { + BOOST_TEST(typedData1[i] == typedData2[i]); + } +} + + +template +class LayerVerifierBaseWithDescriptorAndConstants : public LayerVerifierBaseWithDescriptor +{ +public: + LayerVerifierBaseWithDescriptorAndConstants(const std::string& layerName, + const std::vector& inputInfos, + const std::vector& outputInfos, + const Descriptor& descriptor, + const std::vector& constants) + : LayerVerifierBaseWithDescriptor(layerName, inputInfos, outputInfos, descriptor) + , m_Constants(constants) {} + + void ExecuteStrategy(const armnn::IConnectableLayer* layer, + const armnn::BaseDescriptor& descriptor, + const std::vector& constants, + const char* name, + const armnn::LayerBindingId id = 0) override + { + armnn::IgnoreUnused(id); + + switch (layer->GetType()) + { + case armnn::LayerType::Input: break; + case armnn::LayerType::Output: break; + default: + { + this->VerifyNameAndConnections(layer, name); + const Descriptor& internalDescriptor = static_cast(descriptor); + this->VerifyDescriptor(internalDescriptor); + + for(std::size_t i = 0; i < constants.size(); i++) + { + CompareConstTensor(constants[i], 
m_Constants[i]); + } + } + } + } + +private: + std::vector m_Constants; +}; + +template +static std::vector GenerateRandomData(size_t size) +{ + constexpr bool isIntegerType = std::is_integral::value; + using Distribution = + typename std::conditional, + std::uniform_real_distribution>::type; + + static constexpr DataType lowerLimit = std::numeric_limits::min(); + static constexpr DataType upperLimit = std::numeric_limits::max(); + + static Distribution distribution(lowerLimit, upperLimit); + static std::default_random_engine generator; + + std::vector randomData(size); + std::generate(randomData.begin(), randomData.end(), []() { return distribution(generator); }); + + return randomData; +} \ No newline at end of file diff --git a/src/armnnSerializer/test/SerializerTests.cpp b/src/armnnSerializer/test/SerializerTests.cpp index 44e8a3898e..f261731a75 100644 --- a/src/armnnSerializer/test/SerializerTests.cpp +++ b/src/armnnSerializer/test/SerializerTests.cpp @@ -4,6 +4,7 @@ // #include "../Serializer.hpp" +#include "SerializerTestUtils.hpp" #include #include @@ -11,6 +12,7 @@ #include #include #include +#include #include #include @@ -19,264 +21,36 @@ using armnnDeserializer::IDeserializer; -namespace -{ - -#define DECLARE_LAYER_VERIFIER_CLASS(name) \ -class name##LayerVerifier : public LayerVerifierBase \ -{ \ -public: \ - name##LayerVerifier(const std::string& layerName, \ - const std::vector& inputInfos, \ - const std::vector& outputInfos) \ - : LayerVerifierBase(layerName, inputInfos, outputInfos) {} \ -\ - void Visit##name##Layer(const armnn::IConnectableLayer* layer, const char* name) override \ - { \ - VerifyNameAndConnections(layer, name); \ - } \ -}; - -#define DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(name) \ -class name##LayerVerifier : public LayerVerifierBaseWithDescriptor \ -{ \ -public: \ - name##LayerVerifier(const std::string& layerName, \ - const std::vector& inputInfos, \ - const std::vector& outputInfos, \ - const armnn::name##Descriptor& descriptor) \ - : LayerVerifierBaseWithDescriptor( \ - layerName, inputInfos, outputInfos, descriptor) {} \ -\ - void Visit##name##Layer(const armnn::IConnectableLayer* layer, \ - const armnn::name##Descriptor& descriptor, \ - const char* name) override \ - { \ - VerifyNameAndConnections(layer, name); \ - VerifyDescriptor(descriptor); \ - } \ -}; - -struct DefaultLayerVerifierPolicy -{ - static void Apply(const std::string) - { - BOOST_TEST_MESSAGE("Unexpected layer found in network"); - BOOST_TEST(false); - } -}; - -class LayerVerifierBase : public armnn::LayerVisitorBase -{ -public: - LayerVerifierBase(const std::string& layerName, - const std::vector& inputInfos, - const std::vector& outputInfos) - : m_LayerName(layerName) - , m_InputTensorInfos(inputInfos) - , m_OutputTensorInfos(outputInfos) {} - - void VisitInputLayer(const armnn::IConnectableLayer*, armnn::LayerBindingId, const char*) override {} - - void VisitOutputLayer(const armnn::IConnectableLayer*, armnn::LayerBindingId, const char*) override {} - -protected: - void VerifyNameAndConnections(const armnn::IConnectableLayer* layer, const char* name) - { - BOOST_TEST(name == m_LayerName.c_str()); - - BOOST_TEST(layer->GetNumInputSlots() == m_InputTensorInfos.size()); - BOOST_TEST(layer->GetNumOutputSlots() == m_OutputTensorInfos.size()); - - for (unsigned int i = 0; i < m_InputTensorInfos.size(); i++) - { - const armnn::IOutputSlot* connectedOutput = layer->GetInputSlot(i).GetConnection(); - BOOST_CHECK(connectedOutput); - - const armnn::TensorInfo& connectedInfo = 
connectedOutput->GetTensorInfo(); - BOOST_TEST(connectedInfo.GetShape() == m_InputTensorInfos[i].GetShape()); - BOOST_TEST( - GetDataTypeName(connectedInfo.GetDataType()) == GetDataTypeName(m_InputTensorInfos[i].GetDataType())); - - BOOST_TEST(connectedInfo.GetQuantizationScale() == m_InputTensorInfos[i].GetQuantizationScale()); - BOOST_TEST(connectedInfo.GetQuantizationOffset() == m_InputTensorInfos[i].GetQuantizationOffset()); - } - - for (unsigned int i = 0; i < m_OutputTensorInfos.size(); i++) - { - const armnn::TensorInfo& outputInfo = layer->GetOutputSlot(i).GetTensorInfo(); - BOOST_TEST(outputInfo.GetShape() == m_OutputTensorInfos[i].GetShape()); - BOOST_TEST( - GetDataTypeName(outputInfo.GetDataType()) == GetDataTypeName(m_OutputTensorInfos[i].GetDataType())); - - BOOST_TEST(outputInfo.GetQuantizationScale() == m_OutputTensorInfos[i].GetQuantizationScale()); - BOOST_TEST(outputInfo.GetQuantizationOffset() == m_OutputTensorInfos[i].GetQuantizationOffset()); - } - } - - void VerifyConstTensors(const std::string& tensorName, - const armnn::ConstTensor* expectedPtr, - const armnn::ConstTensor* actualPtr) - { - if (expectedPtr == nullptr) - { - BOOST_CHECK_MESSAGE(actualPtr == nullptr, tensorName + " should not exist"); - } - else - { - BOOST_CHECK_MESSAGE(actualPtr != nullptr, tensorName + " should have been set"); - if (actualPtr != nullptr) - { - const armnn::TensorInfo& expectedInfo = expectedPtr->GetInfo(); - const armnn::TensorInfo& actualInfo = actualPtr->GetInfo(); - - BOOST_CHECK_MESSAGE(expectedInfo.GetShape() == actualInfo.GetShape(), - tensorName + " shapes don't match"); - BOOST_CHECK_MESSAGE( - GetDataTypeName(expectedInfo.GetDataType()) == GetDataTypeName(actualInfo.GetDataType()), - tensorName + " data types don't match"); - - BOOST_CHECK_MESSAGE(expectedPtr->GetNumBytes() == actualPtr->GetNumBytes(), - tensorName + " (GetNumBytes) data sizes do not match"); - if (expectedPtr->GetNumBytes() == actualPtr->GetNumBytes()) - { - //check the data is identical - const char* expectedData = static_cast(expectedPtr->GetMemoryArea()); - const char* actualData = static_cast(actualPtr->GetMemoryArea()); - bool same = true; - for (unsigned int i = 0; i < expectedPtr->GetNumBytes(); ++i) - { - same = expectedData[i] == actualData[i]; - if (!same) - { - break; - } - } - BOOST_CHECK_MESSAGE(same, tensorName + " data does not match"); - } - } - } - } - -private: - std::string m_LayerName; - std::vector m_InputTensorInfos; - std::vector m_OutputTensorInfos; -}; - -template -class LayerVerifierBaseWithDescriptor : public LayerVerifierBase -{ -public: - LayerVerifierBaseWithDescriptor(const std::string& layerName, - const std::vector& inputInfos, - const std::vector& outputInfos, - const Descriptor& descriptor) - : LayerVerifierBase(layerName, inputInfos, outputInfos) - , m_Descriptor(descriptor) {} - -protected: - void VerifyDescriptor(const Descriptor& descriptor) - { - BOOST_CHECK(descriptor == m_Descriptor); - } - - Descriptor m_Descriptor; -}; - -template -void CompareConstTensorData(const void* data1, const void* data2, unsigned int numElements) -{ - T typedData1 = static_cast(data1); - T typedData2 = static_cast(data2); - BOOST_CHECK(typedData1); - BOOST_CHECK(typedData2); - - for (unsigned int i = 0; i < numElements; i++) - { - BOOST_TEST(typedData1[i] == typedData2[i]); - } -} - -void CompareConstTensor(const armnn::ConstTensor& tensor1, const armnn::ConstTensor& tensor2) -{ - BOOST_TEST(tensor1.GetShape() == tensor2.GetShape()); - BOOST_TEST(GetDataTypeName(tensor1.GetDataType()) 
== GetDataTypeName(tensor2.GetDataType())); - - switch (tensor1.GetDataType()) - { - case armnn::DataType::Float32: - CompareConstTensorData( - tensor1.GetMemoryArea(), tensor2.GetMemoryArea(), tensor1.GetNumElements()); - break; - case armnn::DataType::QAsymmU8: - case armnn::DataType::Boolean: - CompareConstTensorData( - tensor1.GetMemoryArea(), tensor2.GetMemoryArea(), tensor1.GetNumElements()); - break; - case armnn::DataType::QSymmS8: - CompareConstTensorData( - tensor1.GetMemoryArea(), tensor2.GetMemoryArea(), tensor1.GetNumElements()); - break; - case armnn::DataType::Signed32: - CompareConstTensorData( - tensor1.GetMemoryArea(), tensor2.GetMemoryArea(), tensor1.GetNumElements()); - break; - default: - // Note that Float16 is not yet implemented - BOOST_TEST_MESSAGE("Unexpected datatype"); - BOOST_TEST(false); - } -} - -armnn::INetworkPtr DeserializeNetwork(const std::string& serializerString) -{ - std::vector const serializerVector{serializerString.begin(), serializerString.end()}; - return IDeserializer::Create()->CreateNetworkFromBinary(serializerVector); -} +BOOST_AUTO_TEST_SUITE(SerializerTests) -std::string SerializeNetwork(const armnn::INetwork& network) +BOOST_AUTO_TEST_CASE(SerializeAbs) { - armnnSerializer::ISerializerPtr serializer = armnnSerializer::ISerializer::Create(); - - serializer->Serialize(network); - - std::stringstream stream; - serializer->SaveSerializedToStream(stream); + const std::string layerName("abs"); + const armnn::TensorInfo tensorInfo({1, 2, 3}, armnn::DataType::Float32); - std::string serializerString{stream.str()}; - return serializerString; -} + armnn::INetworkPtr network = armnn::INetwork::Create(); + armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0); -template -static std::vector GenerateRandomData(size_t size) -{ - constexpr bool isIntegerType = std::is_integral::value; - using Distribution = - typename std::conditional, - std::uniform_real_distribution>::type; + ARMNN_NO_DEPRECATE_WARN_BEGIN + armnn::IConnectableLayer* const absLayer = network->AddAbsLayer(layerName.c_str()); + ARMNN_NO_DEPRECATE_WARN_END + armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0); - static constexpr DataType lowerLimit = std::numeric_limits::min(); - static constexpr DataType upperLimit = std::numeric_limits::max(); + inputLayer->GetOutputSlot(0).Connect(absLayer->GetInputSlot(0)); + absLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0)); - static Distribution distribution(lowerLimit, upperLimit); - static std::default_random_engine generator; + inputLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo); + absLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo); - std::vector randomData(size); - std::generate(randomData.begin(), randomData.end(), []() { return distribution(generator); }); + armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); + BOOST_CHECK(deserializedNetwork); - return randomData; + LayerVerifierBase verifier(layerName, {tensorInfo}, {tensorInfo}); + deserializedNetwork->ExecuteStrategy(verifier); } -} // anonymous namespace - -BOOST_AUTO_TEST_SUITE(SerializerTests) - BOOST_AUTO_TEST_CASE(SerializeAddition) { - DECLARE_LAYER_VERIFIER_CLASS(Addition) - const std::string layerName("addition"); const armnn::TensorInfo tensorInfo({1, 2, 3}, armnn::DataType::Float32); @@ -294,17 +68,16 @@ BOOST_AUTO_TEST_CASE(SerializeAddition) inputLayer1->GetOutputSlot(0).SetTensorInfo(tensorInfo); additionLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo); - armnn::INetworkPtr 
deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); + std::string serializedNetwork = SerializeNetwork(*network); + armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(serializedNetwork); BOOST_CHECK(deserializedNetwork); - AdditionLayerVerifier verifier(layerName, {tensorInfo, tensorInfo}, {tensorInfo}); - deserializedNetwork->Accept(verifier); + LayerVerifierBase verifier(layerName, {tensorInfo, tensorInfo}, {tensorInfo}); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeArgMinMax) { - DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(ArgMinMax) - const std::string layerName("argminmax"); const armnn::TensorInfo inputInfo({1, 2, 3}, armnn::DataType::Float32); const armnn::TensorInfo outputInfo({1, 3}, armnn::DataType::Signed32); @@ -327,54 +100,15 @@ BOOST_AUTO_TEST_CASE(SerializeArgMinMax) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - ArgMinMaxLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, descriptor); - deserializedNetwork->Accept(verifier); + LayerVerifierBaseWithDescriptor verifier(layerName, + {inputInfo}, + {outputInfo}, + descriptor); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeBatchNormalization) { - using Descriptor = armnn::BatchNormalizationDescriptor; - class BatchNormalizationLayerVerifier : public LayerVerifierBaseWithDescriptor - { - public: - BatchNormalizationLayerVerifier(const std::string& layerName, - const std::vector& inputInfos, - const std::vector& outputInfos, - const Descriptor& descriptor, - const armnn::ConstTensor& mean, - const armnn::ConstTensor& variance, - const armnn::ConstTensor& beta, - const armnn::ConstTensor& gamma) - : LayerVerifierBaseWithDescriptor(layerName, inputInfos, outputInfos, descriptor) - , m_Mean(mean) - , m_Variance(variance) - , m_Beta(beta) - , m_Gamma(gamma) {} - - void VisitBatchNormalizationLayer(const armnn::IConnectableLayer* layer, - const Descriptor& descriptor, - const armnn::ConstTensor& mean, - const armnn::ConstTensor& variance, - const armnn::ConstTensor& beta, - const armnn::ConstTensor& gamma, - const char* name) override - { - VerifyNameAndConnections(layer, name); - VerifyDescriptor(descriptor); - - CompareConstTensor(mean, m_Mean); - CompareConstTensor(variance, m_Variance); - CompareConstTensor(beta, m_Beta); - CompareConstTensor(gamma, m_Gamma); - } - - private: - armnn::ConstTensor m_Mean; - armnn::ConstTensor m_Variance; - armnn::ConstTensor m_Beta; - armnn::ConstTensor m_Gamma; - }; - const std::string layerName("batchNormalization"); const armnn::TensorInfo inputInfo ({ 1, 3, 3, 1 }, armnn::DataType::Float32); const armnn::TensorInfo outputInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32); @@ -393,15 +127,21 @@ BOOST_AUTO_TEST_CASE(SerializeBatchNormalization) std::vector betaData({1.0}); std::vector gammaData({0.0}); - armnn::ConstTensor mean(meanInfo, meanData); - armnn::ConstTensor variance(varianceInfo, varianceData); - armnn::ConstTensor beta(betaInfo, betaData); - armnn::ConstTensor gamma(gammaInfo, gammaData); + std::vector constants; + constants.emplace_back(armnn::ConstTensor(meanInfo, meanData)); + constants.emplace_back(armnn::ConstTensor(varianceInfo, varianceData)); + constants.emplace_back(armnn::ConstTensor(betaInfo, betaData)); + constants.emplace_back(armnn::ConstTensor(gammaInfo, gammaData)); armnn::INetworkPtr network = armnn::INetwork::Create(); armnn::IConnectableLayer* const inputLayer = 
network->AddInputLayer(0); armnn::IConnectableLayer* const batchNormalizationLayer = - network->AddBatchNormalizationLayer(descriptor, mean, variance, beta, gamma, layerName.c_str()); + network->AddBatchNormalizationLayer(descriptor, + constants[0], + constants[1], + constants[2], + constants[3], + layerName.c_str()); armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0); inputLayer->GetOutputSlot(0).Connect(batchNormalizationLayer->GetInputSlot(0)); @@ -413,15 +153,13 @@ BOOST_AUTO_TEST_CASE(SerializeBatchNormalization) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - BatchNormalizationLayerVerifier verifier( - layerName, {inputInfo}, {outputInfo}, descriptor, mean, variance, beta, gamma); - deserializedNetwork->Accept(verifier); + LayerVerifierBaseWithDescriptorAndConstants verifier( + layerName, {inputInfo}, {outputInfo}, descriptor, constants); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeBatchToSpaceNd) { - DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(BatchToSpaceNd) - const std::string layerName("spaceToBatchNd"); const armnn::TensorInfo inputInfo({4, 1, 2, 2}, armnn::DataType::Float32); const armnn::TensorInfo outputInfo({1, 1, 4, 4}, armnn::DataType::Float32); @@ -445,14 +183,15 @@ BOOST_AUTO_TEST_CASE(SerializeBatchToSpaceNd) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - BatchToSpaceNdLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, desc); - deserializedNetwork->Accept(verifier); + LayerVerifierBaseWithDescriptor verifier(layerName, + {inputInfo}, + {outputInfo}, + desc); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeComparison) { - DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Comparison) - const std::string layerName("comparison"); const armnn::TensorShape shape{2, 1, 2, 4}; @@ -479,8 +218,11 @@ BOOST_AUTO_TEST_CASE(SerializeComparison) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - ComparisonLayerVerifier verifier(layerName, { inputInfo, inputInfo }, { outputInfo }, descriptor); - deserializedNetwork->Accept(verifier); + LayerVerifierBaseWithDescriptor verifier(layerName, + { inputInfo, inputInfo }, + { outputInfo }, + descriptor); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeConstant) @@ -491,22 +233,37 @@ BOOST_AUTO_TEST_CASE(SerializeConstant) ConstantLayerVerifier(const std::string& layerName, const std::vector& inputInfos, const std::vector& outputInfos, - const armnn::ConstTensor& layerInput) + const std::vector& constants) : LayerVerifierBase(layerName, inputInfos, outputInfos) - , m_LayerInput(layerInput) {} + , m_Constants(constants) {} - void VisitConstantLayer(const armnn::IConnectableLayer* layer, - const armnn::ConstTensor& input, - const char* name) override + void ExecuteStrategy(const armnn::IConnectableLayer* layer, + const armnn::BaseDescriptor& descriptor, + const std::vector& constants, + const char* name, + const armnn::LayerBindingId id = 0) override { - VerifyNameAndConnections(layer, name); - CompareConstTensor(input, m_LayerInput); - } + armnn::IgnoreUnused(descriptor, id); + + switch (layer->GetType()) + { + case armnn::LayerType::Input: break; + case armnn::LayerType::Output: break; + case armnn::LayerType::Addition: break; + default: + { + this->VerifyNameAndConnections(layer, name); 
- void VisitAdditionLayer(const armnn::IConnectableLayer*, const char*) override {} + for (std::size_t i = 0; i < constants.size(); i++) + { + CompareConstTensor(constants[i], m_Constants[i]); + } + } + } + } private: - armnn::ConstTensor m_LayerInput; + const std::vector m_Constants; }; const std::string layerName("constant"); @@ -532,53 +289,12 @@ BOOST_AUTO_TEST_CASE(SerializeConstant) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - ConstantLayerVerifier verifier(layerName, {}, {info}, constTensor); - deserializedNetwork->Accept(verifier); + ConstantLayerVerifier verifier(layerName, {}, {info}, {constTensor}); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeConvolution2d) { - using Descriptor = armnn::Convolution2dDescriptor; - class Convolution2dLayerVerifier : public LayerVerifierBaseWithDescriptor - { - public: - Convolution2dLayerVerifier(const std::string& layerName, - const std::vector& inputInfos, - const std::vector& outputInfos, - const Descriptor& descriptor, - const armnn::ConstTensor& weights, - const armnn::Optional& biases) - : LayerVerifierBaseWithDescriptor(layerName, inputInfos, outputInfos, descriptor) - , m_Weights(weights) - , m_Biases(biases) {} - - void VisitConvolution2dLayer(const armnn::IConnectableLayer* layer, - const Descriptor& descriptor, - const armnn::ConstTensor& weights, - const armnn::Optional& biases, - const char* name) override - { - VerifyNameAndConnections(layer, name); - VerifyDescriptor(descriptor); - - // check weights - CompareConstTensor(weights, m_Weights); - - // check biases - BOOST_CHECK(biases.has_value() == descriptor.m_BiasEnabled); - BOOST_CHECK(biases.has_value() == m_Biases.has_value()); - - if (biases.has_value() && m_Biases.has_value()) - { - CompareConstTensor(biases.value(), m_Biases.value()); - } - } - - private: - armnn::ConstTensor m_Weights; - armnn::Optional m_Biases; - }; - const std::string layerName("convolution2d"); const armnn::TensorInfo inputInfo ({ 1, 5, 5, 1 }, armnn::DataType::Float32); const armnn::TensorInfo outputInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32); @@ -622,53 +338,14 @@ BOOST_AUTO_TEST_CASE(SerializeConvolution2d) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - Convolution2dLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, descriptor, weights, biases); - deserializedNetwork->Accept(verifier); + const std::vector& constants {weights, biases}; + LayerVerifierBaseWithDescriptorAndConstants verifier( + layerName, {inputInfo}, {outputInfo}, descriptor, constants); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeConvolution2dWithPerAxisParams) { - using Descriptor = armnn::Convolution2dDescriptor; - class Convolution2dLayerVerifier : public LayerVerifierBaseWithDescriptor - { - public: - Convolution2dLayerVerifier(const std::string& layerName, - const std::vector& inputInfos, - const std::vector& outputInfos, - const Descriptor& descriptor, - const armnn::ConstTensor& weights, - const armnn::Optional& biases) - : LayerVerifierBaseWithDescriptor(layerName, inputInfos, outputInfos, descriptor) - , m_Weights(weights) - , m_Biases(biases) {} - - void VisitConvolution2dLayer(const armnn::IConnectableLayer* layer, - const Descriptor& descriptor, - const armnn::ConstTensor& weights, - const armnn::Optional& biases, - const char* name) override - { - VerifyNameAndConnections(layer, 
name); - VerifyDescriptor(descriptor); - - // check weights - CompareConstTensor(weights, m_Weights); - - // check biases - BOOST_CHECK(biases.has_value() == descriptor.m_BiasEnabled); - BOOST_CHECK(biases.has_value() == m_Biases.has_value()); - - if (biases.has_value() && m_Biases.has_value()) - { - CompareConstTensor(biases.value(), m_Biases.value()); - } - } - - private: - armnn::ConstTensor m_Weights; - armnn::Optional m_Biases; - }; - using namespace armnn; const std::string layerName("convolution2dWithPerAxis"); @@ -716,14 +393,14 @@ BOOST_AUTO_TEST_CASE(SerializeConvolution2dWithPerAxisParams) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - Convolution2dLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, descriptor, weights, biases); - deserializedNetwork->Accept(verifier); + const std::vector& constants {weights, biases}; + LayerVerifierBaseWithDescriptorAndConstants verifier( + layerName, {inputInfo}, {outputInfo}, descriptor, constants); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeDepthToSpace) { - DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(DepthToSpace) - const std::string layerName("depthToSpace"); const armnn::TensorInfo inputInfo ({ 1, 8, 4, 12 }, armnn::DataType::Float32); @@ -747,53 +424,12 @@ BOOST_AUTO_TEST_CASE(SerializeDepthToSpace) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - DepthToSpaceLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, desc); - deserializedNetwork->Accept(verifier); + LayerVerifierBaseWithDescriptor verifier(layerName, {inputInfo}, {outputInfo}, desc); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeDepthwiseConvolution2d) { - using Descriptor = armnn::DepthwiseConvolution2dDescriptor; - class DepthwiseConvolution2dLayerVerifier : public LayerVerifierBaseWithDescriptor - { - public: - DepthwiseConvolution2dLayerVerifier(const std::string& layerName, - const std::vector& inputInfos, - const std::vector& outputInfos, - const Descriptor& descriptor, - const armnn::ConstTensor& weights, - const armnn::Optional& biases) : - LayerVerifierBaseWithDescriptor(layerName, inputInfos, outputInfos, descriptor), - m_Weights(weights), - m_Biases(biases) {} - - void VisitDepthwiseConvolution2dLayer(const armnn::IConnectableLayer* layer, - const Descriptor& descriptor, - const armnn::ConstTensor& weights, - const armnn::Optional& biases, - const char* name) override - { - VerifyNameAndConnections(layer, name); - VerifyDescriptor(descriptor); - - // check weights - CompareConstTensor(weights, m_Weights); - - // check biases - BOOST_CHECK(biases.has_value() == descriptor.m_BiasEnabled); - BOOST_CHECK(biases.has_value() == m_Biases.has_value()); - - if (biases.has_value() && m_Biases.has_value()) - { - CompareConstTensor(biases.value(), m_Biases.value()); - } - } - - private: - armnn::ConstTensor m_Weights; - armnn::Optional m_Biases; - }; - const std::string layerName("depwiseConvolution2d"); const armnn::TensorInfo inputInfo ({ 1, 5, 5, 3 }, armnn::DataType::Float32); const armnn::TensorInfo outputInfo({ 1, 3, 3, 3 }, armnn::DataType::Float32); @@ -837,53 +473,14 @@ BOOST_AUTO_TEST_CASE(SerializeDepthwiseConvolution2d) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - DepthwiseConvolution2dLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, 
descriptor, weights, biases); - deserializedNetwork->Accept(verifier); + const std::vector& constants {weights, biases}; + LayerVerifierBaseWithDescriptorAndConstants verifier( + layerName, {inputInfo}, {outputInfo}, descriptor, constants); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeDepthwiseConvolution2dWithPerAxisParams) { - using Descriptor = armnn::DepthwiseConvolution2dDescriptor; - class DepthwiseConvolution2dLayerVerifier : public LayerVerifierBaseWithDescriptor - { - public: - DepthwiseConvolution2dLayerVerifier(const std::string& layerName, - const std::vector& inputInfos, - const std::vector& outputInfos, - const Descriptor& descriptor, - const armnn::ConstTensor& weights, - const armnn::Optional& biases) : - LayerVerifierBaseWithDescriptor(layerName, inputInfos, outputInfos, descriptor), - m_Weights(weights), - m_Biases(biases) {} - - void VisitDepthwiseConvolution2dLayer(const armnn::IConnectableLayer* layer, - const Descriptor& descriptor, - const armnn::ConstTensor& weights, - const armnn::Optional& biases, - const char* name) override - { - VerifyNameAndConnections(layer, name); - VerifyDescriptor(descriptor); - - // check weights - CompareConstTensor(weights, m_Weights); - - // check biases - BOOST_CHECK(biases.has_value() == descriptor.m_BiasEnabled); - BOOST_CHECK(biases.has_value() == m_Biases.has_value()); - - if (biases.has_value() && m_Biases.has_value()) - { - CompareConstTensor(biases.value(), m_Biases.value()); - } - } - - private: - armnn::ConstTensor m_Weights; - armnn::Optional m_Biases; - }; - using namespace armnn; const std::string layerName("depwiseConvolution2dWithPerAxis"); @@ -933,14 +530,14 @@ BOOST_AUTO_TEST_CASE(SerializeDepthwiseConvolution2dWithPerAxisParams) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - DepthwiseConvolution2dLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, descriptor, weights, biases); - deserializedNetwork->Accept(verifier); + const std::vector& constants {weights, biases}; + LayerVerifierBaseWithDescriptorAndConstants verifier( + layerName, {inputInfo}, {outputInfo}, descriptor, constants); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeDequantize) { - DECLARE_LAYER_VERIFIER_CLASS(Dequantize) - const std::string layerName("dequantize"); const armnn::TensorInfo inputInfo({ 1, 5, 2, 3 }, armnn::DataType::QAsymmU8, 0.5f, 1); const armnn::TensorInfo outputInfo({ 1, 5, 2, 3 }, armnn::DataType::Float32); @@ -959,39 +556,12 @@ BOOST_AUTO_TEST_CASE(SerializeDequantize) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - DequantizeLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}); - deserializedNetwork->Accept(verifier); + LayerVerifierBase verifier(layerName, {inputInfo}, {outputInfo}); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeDeserializeDetectionPostProcess) { - using Descriptor = armnn::DetectionPostProcessDescriptor; - class DetectionPostProcessLayerVerifier : public LayerVerifierBaseWithDescriptor - { - public: - DetectionPostProcessLayerVerifier(const std::string& layerName, - const std::vector& inputInfos, - const std::vector& outputInfos, - const Descriptor& descriptor, - const armnn::ConstTensor& anchors) - : LayerVerifierBaseWithDescriptor(layerName, inputInfos, outputInfos, descriptor) - , m_Anchors(anchors) {} - - void 
VisitDetectionPostProcessLayer(const armnn::IConnectableLayer* layer, - const Descriptor& descriptor, - const armnn::ConstTensor& anchors, - const char* name) override - { - VerifyNameAndConnections(layer, name); - VerifyDescriptor(descriptor); - - CompareConstTensor(anchors, m_Anchors); - } - - private: - armnn::ConstTensor m_Anchors; - }; - const std::string layerName("detectionPostProcess"); const std::vector inputInfos({ @@ -1051,14 +621,14 @@ BOOST_AUTO_TEST_CASE(SerializeDeserializeDetectionPostProcess) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - DetectionPostProcessLayerVerifier verifier(layerName, inputInfos, outputInfos, descriptor, anchors); - deserializedNetwork->Accept(verifier); + const std::vector& constants {anchors}; + LayerVerifierBaseWithDescriptorAndConstants verifier( + layerName, inputInfos, outputInfos, descriptor, constants); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeDivision) { - DECLARE_LAYER_VERIFIER_CLASS(Division) - const std::string layerName("division"); const armnn::TensorInfo info({ 1, 5, 2, 3 }, armnn::DataType::Float32); @@ -1079,131 +649,41 @@ BOOST_AUTO_TEST_CASE(SerializeDivision) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - DivisionLayerVerifier verifier(layerName, {info, info}, {info}); - deserializedNetwork->Accept(verifier); + LayerVerifierBase verifier(layerName, {info, info}, {info}); + deserializedNetwork->ExecuteStrategy(verifier); } -class EqualLayerVerifier : public LayerVerifierBase -{ -public: - EqualLayerVerifier(const std::string& layerName, - const std::vector& inputInfos, - const std::vector& outputInfos) - : LayerVerifierBase(layerName, inputInfos, outputInfos) {} - - void VisitComparisonLayer(const armnn::IConnectableLayer* layer, - const armnn::ComparisonDescriptor& descriptor, - const char* name) override - { - VerifyNameAndConnections(layer, name); - BOOST_CHECK(descriptor.m_Operation == armnn::ComparisonOperation::Equal); - } - - void VisitEqualLayer(const armnn::IConnectableLayer*, const char*) override - { - throw armnn::Exception("EqualLayer should have translated to ComparisonLayer"); - } -}; - -// NOTE: Until the deprecated AddEqualLayer disappears this test checks that calling -// AddEqualLayer places a ComparisonLayer into the serialized format and that -// when this deserialises we have a ComparisonLayer -BOOST_AUTO_TEST_CASE(SerializeEqual) +BOOST_AUTO_TEST_CASE(SerializeDeserializeEqual) { - const std::string layerName("equal"); - - const armnn::TensorShape shape{2, 1, 2, 4}; - - const armnn::TensorInfo inputInfo = armnn::TensorInfo(shape, armnn::DataType::Float32); - const armnn::TensorInfo outputInfo = armnn::TensorInfo(shape, armnn::DataType::Boolean); + const std::string layerName("EqualLayer"); + const armnn::TensorInfo inputTensorInfo1 = armnn::TensorInfo({2, 1, 2, 4}, armnn::DataType::Float32); + const armnn::TensorInfo inputTensorInfo2 = armnn::TensorInfo({2, 1, 2, 4}, armnn::DataType::Float32); + const armnn::TensorInfo outputTensorInfo = armnn::TensorInfo({2, 1, 2, 4}, armnn::DataType::Boolean); armnn::INetworkPtr network = armnn::INetwork::Create(); - armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0); - armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1); + armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(0); + armnn::IConnectableLayer* const 
inputLayer2 = network->AddInputLayer(1); ARMNN_NO_DEPRECATE_WARN_BEGIN armnn::IConnectableLayer* const equalLayer = network->AddEqualLayer(layerName.c_str()); ARMNN_NO_DEPRECATE_WARN_END armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0); - inputLayer0->GetOutputSlot(0).Connect(equalLayer->GetInputSlot(0)); - inputLayer1->GetOutputSlot(0).Connect(equalLayer->GetInputSlot(1)); + inputLayer1->GetOutputSlot(0).Connect(equalLayer->GetInputSlot(0)); + inputLayer1->GetOutputSlot(0).SetTensorInfo(inputTensorInfo1); + inputLayer2->GetOutputSlot(0).Connect(equalLayer->GetInputSlot(1)); + inputLayer2->GetOutputSlot(0).SetTensorInfo(inputTensorInfo2); equalLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0)); - - inputLayer0->GetOutputSlot(0).SetTensorInfo(inputInfo); - inputLayer1->GetOutputSlot(0).SetTensorInfo(inputInfo); - equalLayer->GetOutputSlot(0).SetTensorInfo(outputInfo); + equalLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo); armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - EqualLayerVerifier verifier(layerName, { inputInfo, inputInfo }, { outputInfo }); - deserializedNetwork->Accept(verifier); -} - -BOOST_AUTO_TEST_CASE(EnsureEqualBackwardCompatibility) -{ - // The hex data below is a flat buffer containing a simple network with two inputs, - // an EqualLayer (now deprecated) and an output - // - // This test verifies that we can still deserialize this old-style model by replacing - // the EqualLayer with an equivalent ComparisonLayer - const std::vector equalModel = - { - 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00, - 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, - 0xCC, 0x01, 0x00, 0x00, 0x20, 0x01, 0x00, 0x00, 0x70, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x02, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, - 0x60, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B, 0x04, 0x00, 0x00, 0x00, 0xFE, 0xFE, 0xFF, 0xFF, 0x04, 0x00, - 0x00, 0x00, 0x06, 0xFF, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0xEA, 0xFE, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00, - 0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x64, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xB4, 0xFE, 0xFF, 0xFF, 0x00, 0x00, - 0x00, 0x13, 0x04, 0x00, 0x00, 0x00, 0x52, 0xFF, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x36, 0xFF, 0xFF, 0xFF, - 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x11, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x1C, 0x00, - 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x65, 0x71, 0x75, 0x61, 0x6C, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, - 0x5C, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x34, 0xFF, - 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x92, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x04, 0x08, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, - 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x08, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, - 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x00, 0x00, - 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x08, 0x00, 0x0E, 0x00, - 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x06, 0x00, 0x08, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0E, 0x00, - 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x0E, 0x00, 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x14, 0x00, 0x0E, 0x00, 0x00, 0x00, - 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, - 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, - 0x00, 0x00, 0x66, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x04, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x04, 0x00, - 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09, - 0x04, 0x00, 0x00, 0x00, 0xF6, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x0A, 0x00, - 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x14, 0x00, 0x00, 0x00, - 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, - 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0A, 0x00, 0x00, 0x00, - 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x08, 0x00, - 0x07, 0x00, 0x0C, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, - 0x04, 0x00, 0x00, 0x00 - }; - - armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(std::string(equalModel.begin(), equalModel.end())); - BOOST_CHECK(deserializedNetwork); - - const armnn::TensorShape shape{ 2, 1, 2, 4 }; - - const armnn::TensorInfo inputInfo = armnn::TensorInfo(shape, armnn::DataType::Float32); - const armnn::TensorInfo outputInfo = armnn::TensorInfo(shape, armnn::DataType::Boolean); - - EqualLayerVerifier verifier("equal", { inputInfo, inputInfo }, { outputInfo }); - deserializedNetwork->Accept(verifier); + LayerVerifierBase verifier(layerName, {inputTensorInfo1, inputTensorInfo2}, {outputTensorInfo}); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeFill) { - DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Fill) - const std::string layerName("fill"); const armnn::TensorInfo inputInfo({4}, armnn::DataType::Signed32); const armnn::TensorInfo outputInfo({1, 3, 3, 1}, armnn::DataType::Float32); @@ -1224,15 +704,13 @@ BOOST_AUTO_TEST_CASE(SerializeFill) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - FillLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, descriptor); + LayerVerifierBaseWithDescriptor verifier(layerName, {inputInfo}, {outputInfo}, descriptor); - deserializedNetwork->Accept(verifier); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeFloor) { - DECLARE_LAYER_VERIFIER_CLASS(Floor) - const std::string layerName("floor"); 
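// The hunks above all follow the same pattern: a hand-written Visit*Layer verifier (or a
// DECLARE_LAYER_VERIFIER_CLASS* macro) is dropped in favour of the shared LayerVerifierBase /
// LayerVerifierBaseWithDescriptor helpers from SerializerTestUtils.hpp, and
// deserializedNetwork->Accept(visitor) becomes deserializedNetwork->ExecuteStrategy(verifier).
// That header is not reproduced in this part of the patch; the sketch below is only an
// illustration of the shape these call sites rely on (template parameters and member names
// are inferred from the tests above and may differ from the real header).
template <typename Descriptor>
class LayerVerifierBaseWithDescriptor : public LayerVerifierBase
{
public:
    LayerVerifierBaseWithDescriptor(const std::string& layerName,
                                    const std::vector<armnn::TensorInfo>& inputInfos,
                                    const std::vector<armnn::TensorInfo>& outputInfos,
                                    const Descriptor& descriptor)
        : LayerVerifierBase(layerName, inputInfos, outputInfos)
        , m_Descriptor(descriptor) {}

    // One callback for every layer, instead of one Visit method per layer type.
    void ExecuteStrategy(const armnn::IConnectableLayer* layer,
                         const armnn::BaseDescriptor& descriptor,
                         const std::vector<armnn::ConstTensor>& constants,
                         const char* name,
                         const armnn::LayerBindingId id = 0) override
    {
        armnn::IgnoreUnused(constants, id);
        switch (layer->GetType())
        {
            case armnn::LayerType::Input:  break;
            case armnn::LayerType::Output: break;
            default:
            {
                VerifyNameAndConnections(layer, name);
                // The unified interface hands back a BaseDescriptor reference; downcast to
                // the concrete descriptor type the test constructed the verifier with.
                VerifyDescriptor(static_cast<const Descriptor&>(descriptor));
            }
        }
    }

protected:
    void VerifyDescriptor(const Descriptor& descriptor)
    {
        // Assumes the descriptor type provides operator==, as the per-field checks it replaces did.
        BOOST_CHECK(descriptor == m_Descriptor);
    }

    Descriptor m_Descriptor;
};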
const armnn::TensorInfo info({4,4}, armnn::DataType::Float32); @@ -1250,51 +728,12 @@ BOOST_AUTO_TEST_CASE(SerializeFloor) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - FloorLayerVerifier verifier(layerName, {info}, {info}); - deserializedNetwork->Accept(verifier); + LayerVerifierBase verifier(layerName, {info}, {info}); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeFullyConnected) { - using Descriptor = armnn::FullyConnectedDescriptor; - class FullyConnectedLayerVerifier : public LayerVerifierBaseWithDescriptor - { - public: - FullyConnectedLayerVerifier(const std::string& layerName, - const std::vector& inputInfos, - const std::vector& outputInfos, - const Descriptor& descriptor, - const armnn::ConstTensor& weight, - const armnn::Optional& bias) - : LayerVerifierBaseWithDescriptor(layerName, inputInfos, outputInfos, descriptor) - , m_Weight(weight) - , m_Bias(bias) {} - - void VisitFullyConnectedLayer(const armnn::IConnectableLayer* layer, - const Descriptor& descriptor, - const armnn::ConstTensor& weight, - const armnn::Optional& bias, - const char* name) override - { - VerifyNameAndConnections(layer, name); - VerifyDescriptor(descriptor); - - CompareConstTensor(weight, m_Weight); - - BOOST_TEST(bias.has_value() == descriptor.m_BiasEnabled); - BOOST_TEST(bias.has_value() == m_Bias.has_value()); - - if (bias.has_value() && m_Bias.has_value()) - { - CompareConstTensor(bias.value(), m_Bias.value()); - } - } - - private: - armnn::ConstTensor m_Weight; - armnn::Optional m_Bias; - }; - const std::string layerName("fullyConnected"); const armnn::TensorInfo inputInfo ({ 2, 5, 1, 1 }, armnn::DataType::Float32); const armnn::TensorInfo outputInfo({ 2, 3 }, armnn::DataType::Float32); @@ -1328,8 +767,10 @@ BOOST_AUTO_TEST_CASE(SerializeFullyConnected) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - FullyConnectedLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, descriptor, weights, biases); - deserializedNetwork->Accept(verifier); + const std::vector constants {weights, biases}; + LayerVerifierBaseWithDescriptorAndConstants verifier( + layerName, {inputInfo}, {outputInfo}, descriptor, constants); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeGather) @@ -1344,17 +785,26 @@ BOOST_AUTO_TEST_CASE(SerializeGather) const GatherDescriptor& descriptor) : LayerVerifierBaseWithDescriptor(layerName, inputInfos, outputInfos, descriptor) {} - void VisitGatherLayer(const armnn::IConnectableLayer* layer, - const GatherDescriptor& descriptor, - const char *name) override + void ExecuteStrategy(const armnn::IConnectableLayer* layer, + const armnn::BaseDescriptor& descriptor, + const std::vector& constants, + const char* name, + const armnn::LayerBindingId id = 0) override { - VerifyNameAndConnections(layer, name); - BOOST_CHECK(descriptor.m_Axis == m_Descriptor.m_Axis); + armnn::IgnoreUnused(constants, id); + switch (layer->GetType()) + { + case armnn::LayerType::Input: break; + case armnn::LayerType::Output: break; + case armnn::LayerType::Constant: break; + default: + { + VerifyNameAndConnections(layer, name); + const GatherDescriptor& layerDescriptor = static_cast(descriptor); + BOOST_CHECK(layerDescriptor.m_Axis == m_Descriptor.m_Axis); + } + } } - - void VisitConstantLayer(const armnn::IConnectableLayer*, - const armnn::ConstTensor&, - const char*) override {} }; const 
std::string layerName("gather"); @@ -1390,35 +840,14 @@ BOOST_AUTO_TEST_CASE(SerializeGather) BOOST_CHECK(deserializedNetwork); GatherLayerVerifier verifier(layerName, {paramsInfo, indicesInfo}, {outputInfo}, descriptor); - deserializedNetwork->Accept(verifier); + deserializedNetwork->ExecuteStrategy(verifier); } -class GreaterLayerVerifier : public LayerVerifierBase -{ -public: - GreaterLayerVerifier(const std::string& layerName, - const std::vector& inputInfos, - const std::vector& outputInfos) - : LayerVerifierBase(layerName, inputInfos, outputInfos) {} - - void VisitComparisonLayer(const armnn::IConnectableLayer* layer, - const armnn::ComparisonDescriptor& descriptor, - const char* name) override - { - VerifyNameAndConnections(layer, name); - BOOST_CHECK(descriptor.m_Operation == armnn::ComparisonOperation::Greater); - } - - void VisitGreaterLayer(const armnn::IConnectableLayer*, const char*) override - { - throw armnn::Exception("GreaterLayer should have translated to ComparisonLayer"); - } -}; // NOTE: Until the deprecated AddGreaterLayer disappears this test checks that calling // AddGreaterLayer places a ComparisonLayer into the serialized format and that // when this deserialises we have a ComparisonLayer -BOOST_AUTO_TEST_CASE(SerializeGreater) +BOOST_AUTO_TEST_CASE(SerializeGreaterDeprecated) { const std::string layerName("greater"); @@ -1446,74 +875,13 @@ BOOST_AUTO_TEST_CASE(SerializeGreater) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - GreaterLayerVerifier verifier(layerName, { inputInfo, inputInfo }, { outputInfo }); - deserializedNetwork->Accept(verifier); + LayerVerifierBase verifier(layerName, { inputInfo, inputInfo }, { outputInfo }); + deserializedNetwork->ExecuteStrategy(verifier); } -BOOST_AUTO_TEST_CASE(EnsureGreaterBackwardCompatibility) -{ - // The hex data below is a flat buffer containing a simple network with two inputs, - // an GreaterLayer (now deprecated) and an output - // - // This test verifies that we can still deserialize this old-style model by replacing - // the GreaterLayer with an equivalent ComparisonLayer - const std::vector greaterModel = - { - 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00, - 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, - 0xCC, 0x01, 0x00, 0x00, 0x20, 0x01, 0x00, 0x00, 0x70, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x02, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, - 0x60, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B, 0x04, 0x00, 0x00, 0x00, 0xFE, 0xFE, 0xFF, 0xFF, 0x04, 0x00, - 0x00, 0x00, 0x06, 0xFF, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0xEA, 0xFE, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00, - 0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x64, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xB4, 0xFE, 0xFF, 0xFF, 0x00, 0x00, - 0x00, 0x19, 0x04, 0x00, 0x00, 0x00, 0x52, 0xFF, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x36, 0xFF, 0xFF, 0xFF, - 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x1C, 0x00, - 0x00, 0x00, 0x07, 0x00, 0x00, 0x00, 0x67, 0x72, 0x65, 0x61, 0x74, 0x65, 0x72, 0x00, 0x02, 0x00, 0x00, 0x00, - 0x5C, 0x00, 0x00, 0x00, 0x40, 
0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x34, 0xFF, - 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x92, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x04, 0x08, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, - 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x08, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, - 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x00, 0x00, - 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0E, 0x00, - 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x06, 0x00, 0x08, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0E, 0x00, - 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x0E, 0x00, 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x14, 0x00, 0x0E, 0x00, 0x00, 0x00, - 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, - 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, - 0x00, 0x00, 0x66, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, - 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09, - 0x04, 0x00, 0x00, 0x00, 0xF6, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x0A, 0x00, - 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x14, 0x00, 0x00, 0x00, - 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, - 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0A, 0x00, 0x00, 0x00, - 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x08, 0x00, - 0x07, 0x00, 0x0C, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, - 0x02, 0x00, 0x00, 0x00 - }; - - armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(std::string(greaterModel.begin(), greaterModel.end())); - BOOST_CHECK(deserializedNetwork); - - const armnn::TensorShape shape{ 1, 2, 2, 2 }; - - const armnn::TensorInfo inputInfo = armnn::TensorInfo(shape, armnn::DataType::Float32); - const armnn::TensorInfo outputInfo = armnn::TensorInfo(shape, armnn::DataType::Boolean); - - GreaterLayerVerifier verifier("greater", { inputInfo, inputInfo }, { outputInfo }); - deserializedNetwork->Accept(verifier); -} BOOST_AUTO_TEST_CASE(SerializeInstanceNormalization) { - DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(InstanceNormalization) - const std::string layerName("instanceNormalization"); const armnn::TensorInfo info({ 1, 2, 1, 5 }, armnn::DataType::Float32); @@ -1538,12 +906,11 @@ BOOST_AUTO_TEST_CASE(SerializeInstanceNormalization) armnn::INetworkPtr deserializedNetwork = 
DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - InstanceNormalizationLayerVerifier verifier(layerName, {info}, {info}, descriptor); - deserializedNetwork->Accept(verifier); + LayerVerifierBaseWithDescriptor verifier( + layerName, {info}, {info}, descriptor); + deserializedNetwork->ExecuteStrategy(verifier); } -DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(L2Normalization) - BOOST_AUTO_TEST_CASE(SerializeL2Normalization) { const std::string l2NormLayerName("l2Normalization"); @@ -1567,8 +934,9 @@ BOOST_AUTO_TEST_CASE(SerializeL2Normalization) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - L2NormalizationLayerVerifier verifier(l2NormLayerName, {info}, {info}, desc); - deserializedNetwork->Accept(verifier); + LayerVerifierBaseWithDescriptor verifier( + l2NormLayerName, {info}, {info}, desc); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(EnsureL2NormalizationBackwardCompatibility) @@ -1623,14 +991,13 @@ BOOST_AUTO_TEST_CASE(EnsureL2NormalizationBackwardCompatibility) // Since this variable does not exist in the l2NormalizationModel dump, the default value will be loaded desc.m_Eps = 1e-12f; - L2NormalizationLayerVerifier verifier(layerName, {inputInfo}, {inputInfo}, desc); - deserializedNetwork->Accept(verifier); + LayerVerifierBaseWithDescriptor verifier( + layerName, {inputInfo}, {inputInfo}, desc); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeLogicalBinary) { - DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(LogicalBinary) - const std::string layerName("logicalBinaryAnd"); const armnn::TensorShape shape{2, 1, 2, 2}; @@ -1657,14 +1024,13 @@ BOOST_AUTO_TEST_CASE(SerializeLogicalBinary) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - LogicalBinaryLayerVerifier verifier(layerName, { inputInfo, inputInfo }, { outputInfo }, descriptor); - deserializedNetwork->Accept(verifier); + LayerVerifierBaseWithDescriptor verifier( + layerName, { inputInfo, inputInfo }, { outputInfo }, descriptor); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeLogicalUnary) { - DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(ElementwiseUnary) - const std::string layerName("elementwiseUnaryLogicalNot"); const armnn::TensorShape shape{2, 1, 2, 2}; @@ -1690,15 +1056,14 @@ BOOST_AUTO_TEST_CASE(SerializeLogicalUnary) BOOST_CHECK(deserializedNetwork); - ElementwiseUnaryLayerVerifier verifier(layerName, { inputInfo }, { outputInfo }, descriptor); + LayerVerifierBaseWithDescriptor verifier( + layerName, { inputInfo }, { outputInfo }, descriptor); - deserializedNetwork->Accept(verifier); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeLogSoftmax) { - DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(LogSoftmax) - const std::string layerName("log_softmax"); const armnn::TensorInfo info({1, 10}, armnn::DataType::Float32); @@ -1720,14 +1085,12 @@ BOOST_AUTO_TEST_CASE(SerializeLogSoftmax) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - LogSoftmaxLayerVerifier verifier(layerName, {info}, {info}, descriptor); - deserializedNetwork->Accept(verifier); + LayerVerifierBaseWithDescriptor verifier(layerName, {info}, {info}, descriptor); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeMaximum) { - 
DECLARE_LAYER_VERIFIER_CLASS(Maximum) - const std::string layerName("maximum"); const armnn::TensorInfo info({ 1, 2, 2, 3 }, armnn::DataType::Float32); @@ -1748,14 +1111,12 @@ BOOST_AUTO_TEST_CASE(SerializeMaximum) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - MaximumLayerVerifier verifier(layerName, {info, info}, {info}); - deserializedNetwork->Accept(verifier); + LayerVerifierBase verifier(layerName, {info, info}, {info}); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeMean) { - DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Mean) - const std::string layerName("mean"); const armnn::TensorInfo inputInfo({1, 1, 3, 2}, armnn::DataType::Float32); const armnn::TensorInfo outputInfo({1, 1, 1, 2}, armnn::DataType::Float32); @@ -1778,14 +1139,12 @@ BOOST_AUTO_TEST_CASE(SerializeMean) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - MeanLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, descriptor); - deserializedNetwork->Accept(verifier); + LayerVerifierBaseWithDescriptor verifier(layerName, {inputInfo}, {outputInfo}, descriptor); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeMerge) { - DECLARE_LAYER_VERIFIER_CLASS(Merge) - const std::string layerName("merge"); const armnn::TensorInfo info({ 1, 2, 2, 3 }, armnn::DataType::Float32); @@ -1806,8 +1165,8 @@ BOOST_AUTO_TEST_CASE(SerializeMerge) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - MergeLayerVerifier verifier(layerName, {info, info}, {info}); - deserializedNetwork->Accept(verifier); + LayerVerifierBase verifier(layerName, {info, info}, {info}); + deserializedNetwork->ExecuteStrategy(verifier); } class MergerLayerVerifier : public LayerVerifierBaseWithDescriptor @@ -1819,19 +1178,35 @@ public: const armnn::OriginsDescriptor& descriptor) : LayerVerifierBaseWithDescriptor(layerName, inputInfos, outputInfos, descriptor) {} - void VisitMergerLayer(const armnn::IConnectableLayer*, - const armnn::OriginsDescriptor&, - const char*) override - { - throw armnn::Exception("MergerLayer should have translated to ConcatLayer"); - } - - void VisitConcatLayer(const armnn::IConnectableLayer* layer, - const armnn::OriginsDescriptor& descriptor, - const char* name) override + void ExecuteStrategy(const armnn::IConnectableLayer* layer, + const armnn::BaseDescriptor& descriptor, + const std::vector& constants, + const char* name, + const armnn::LayerBindingId id = 0) override { - VerifyNameAndConnections(layer, name); - VerifyDescriptor(descriptor); + armnn::IgnoreUnused(descriptor, constants, id); + switch (layer->GetType()) + { + case armnn::LayerType::Input: break; + case armnn::LayerType::Output: break; + case armnn::LayerType::Merge: + { + throw armnn::Exception("MergerLayer should have translated to ConcatLayer"); + break; + } + case armnn::LayerType::Concat: + { + VerifyNameAndConnections(layer, name); + const armnn::MergerDescriptor& layerDescriptor = + static_cast(descriptor); + VerifyDescriptor(layerDescriptor); + break; + } + default: + { + throw armnn::Exception("Unexpected layer type in Merge test model"); + } + } } }; @@ -1870,7 +1245,7 @@ BOOST_AUTO_TEST_CASE(SerializeMerger) BOOST_CHECK(deserializedNetwork); MergerLayerVerifier verifier(layerName, {inputInfo, inputInfo}, {outputInfo}, descriptor); - deserializedNetwork->Accept(verifier); + 
deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(EnsureMergerLayerBackwardCompatibility) @@ -1939,7 +1314,7 @@ BOOST_AUTO_TEST_CASE(EnsureMergerLayerBackwardCompatibility) armnn::CreateDescriptorForConcatenation(shapes.begin(), shapes.end(), 0); MergerLayerVerifier verifier("merger", { inputInfo, inputInfo }, { outputInfo }, descriptor); - deserializedNetwork->Accept(verifier); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeConcat) @@ -1974,13 +1349,11 @@ BOOST_AUTO_TEST_CASE(SerializeConcat) // NOTE: using the MergerLayerVerifier to ensure that it is a concat layer and not a // merger layer that gets placed into the graph. MergerLayerVerifier verifier(layerName, {inputInfo, inputInfo}, {outputInfo}, descriptor); - deserializedNetwork->Accept(verifier); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeMinimum) { - DECLARE_LAYER_VERIFIER_CLASS(Minimum) - const std::string layerName("minimum"); const armnn::TensorInfo info({ 1, 2, 2, 3 }, armnn::DataType::Float32); @@ -2001,14 +1374,12 @@ BOOST_AUTO_TEST_CASE(SerializeMinimum) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - MinimumLayerVerifier verifier(layerName, {info, info}, {info}); - deserializedNetwork->Accept(verifier); + LayerVerifierBase verifier(layerName, {info, info}, {info}); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeMultiplication) { - DECLARE_LAYER_VERIFIER_CLASS(Multiplication) - const std::string layerName("multiplication"); const armnn::TensorInfo info({ 1, 5, 2, 3 }, armnn::DataType::Float32); @@ -2029,14 +1400,12 @@ BOOST_AUTO_TEST_CASE(SerializeMultiplication) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - MultiplicationLayerVerifier verifier(layerName, {info, info}, {info}); - deserializedNetwork->Accept(verifier); + LayerVerifierBase verifier(layerName, {info, info}, {info}); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializePrelu) { - DECLARE_LAYER_VERIFIER_CLASS(Prelu) - const std::string layerName("prelu"); armnn::TensorInfo inputTensorInfo ({ 4, 1, 2 }, armnn::DataType::Float32); @@ -2060,14 +1429,12 @@ BOOST_AUTO_TEST_CASE(SerializePrelu) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - PreluLayerVerifier verifier(layerName, {inputTensorInfo, alphaTensorInfo}, {outputTensorInfo}); - deserializedNetwork->Accept(verifier); + LayerVerifierBase verifier(layerName, {inputTensorInfo, alphaTensorInfo}, {outputTensorInfo}); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeNormalization) { - DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Normalization) - const std::string layerName("normalization"); const armnn::TensorInfo info({2, 1, 2, 2}, armnn::DataType::Float32); @@ -2092,12 +1459,10 @@ BOOST_AUTO_TEST_CASE(SerializeNormalization) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - NormalizationLayerVerifier verifier(layerName, {info}, {info}, desc); - deserializedNetwork->Accept(verifier); + LayerVerifierBaseWithDescriptor verifier(layerName, {info}, {info}, desc); + deserializedNetwork->ExecuteStrategy(verifier); } -DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Pad) - BOOST_AUTO_TEST_CASE(SerializePad) { const std::string 
layerName("pad"); @@ -2120,8 +1485,11 @@ BOOST_AUTO_TEST_CASE(SerializePad) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - PadLayerVerifier verifier(layerName, {inputTensorInfo}, {outputTensorInfo}, desc); - deserializedNetwork->Accept(verifier); + LayerVerifierBaseWithDescriptor verifier(layerName, + {inputTensorInfo}, + {outputTensorInfo}, + desc); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(EnsurePadBackwardCompatibility) @@ -2174,14 +1542,12 @@ BOOST_AUTO_TEST_CASE(EnsurePadBackwardCompatibility) armnn::PadDescriptor descriptor({{ 0, 0 }, { 1, 0 }, { 1, 1 }, { 1, 2 }}); - PadLayerVerifier verifier("pad", { inputInfo }, { outputInfo }, descriptor); - deserializedNetwork->Accept(verifier); + LayerVerifierBaseWithDescriptor verifier("pad", { inputInfo }, { outputInfo }, descriptor); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializePermute) { - DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Permute) - const std::string layerName("permute"); const armnn::TensorInfo inputTensorInfo({4, 3, 2, 1}, armnn::DataType::Float32); const armnn::TensorInfo outputTensorInfo({1, 2, 3, 4}, armnn::DataType::Float32); @@ -2202,14 +1568,13 @@ BOOST_AUTO_TEST_CASE(SerializePermute) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - PermuteLayerVerifier verifier(layerName, {inputTensorInfo}, {outputTensorInfo}, descriptor); - deserializedNetwork->Accept(verifier); + LayerVerifierBaseWithDescriptor verifier( + layerName, {inputTensorInfo}, {outputTensorInfo}, descriptor); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializePooling2d) { - DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Pooling2d) - const std::string layerName("pooling2d"); const armnn::TensorInfo inputInfo({1, 2, 2, 1}, armnn::DataType::Float32); const armnn::TensorInfo outputInfo({1, 1, 1, 1}, armnn::DataType::Float32); @@ -2242,14 +1607,13 @@ BOOST_AUTO_TEST_CASE(SerializePooling2d) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - Pooling2dLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, desc); - deserializedNetwork->Accept(verifier); + LayerVerifierBaseWithDescriptor verifier( + layerName, {inputInfo}, {outputInfo}, desc); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeQuantize) { - DECLARE_LAYER_VERIFIER_CLASS(Quantize) - const std::string layerName("quantize"); const armnn::TensorInfo info({ 1, 2, 2, 3 }, armnn::DataType::Float32); @@ -2267,14 +1631,12 @@ BOOST_AUTO_TEST_CASE(SerializeQuantize) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - QuantizeLayerVerifier verifier(layerName, {info}, {info}); - deserializedNetwork->Accept(verifier); + LayerVerifierBase verifier(layerName, {info}, {info}); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeRank) { - DECLARE_LAYER_VERIFIER_CLASS(Rank) - const std::string layerName("rank"); const armnn::TensorInfo inputInfo({1, 9}, armnn::DataType::Float32); const armnn::TensorInfo outputInfo({1}, armnn::DataType::Signed32); @@ -2293,14 +1655,12 @@ BOOST_AUTO_TEST_CASE(SerializeRank) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - RankLayerVerifier 
verifier(layerName, {inputInfo}, {outputInfo}); - deserializedNetwork->Accept(verifier); + LayerVerifierBase verifier(layerName, {inputInfo}, {outputInfo}); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeReduceSum) { - DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Reduce) - const std::string layerName("Reduce_Sum"); const armnn::TensorInfo inputInfo({1, 1, 3, 2}, armnn::DataType::Float32); const armnn::TensorInfo outputInfo({1, 1, 1, 2}, armnn::DataType::Float32); @@ -2323,14 +1683,12 @@ BOOST_AUTO_TEST_CASE(SerializeReduceSum) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - ReduceLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, descriptor); - deserializedNetwork->Accept(verifier); + LayerVerifierBaseWithDescriptor verifier(layerName, {inputInfo}, {outputInfo}, descriptor); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeReshape) { - DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Reshape) - const std::string layerName("reshape"); const armnn::TensorInfo inputInfo({1, 9}, armnn::DataType::Float32); const armnn::TensorInfo outputInfo({3, 3}, armnn::DataType::Float32); @@ -2351,14 +1709,13 @@ BOOST_AUTO_TEST_CASE(SerializeReshape) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - ReshapeLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, descriptor); - deserializedNetwork->Accept(verifier); + LayerVerifierBaseWithDescriptor verifier( + layerName, {inputInfo}, {outputInfo}, descriptor); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeResize) { - DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Resize) - const std::string layerName("resize"); const armnn::TensorInfo inputInfo = armnn::TensorInfo({1, 3, 5, 5}, armnn::DataType::Float32); const armnn::TensorInfo outputInfo = armnn::TensorInfo({1, 3, 2, 4}, armnn::DataType::Float32); @@ -2384,8 +1741,8 @@ BOOST_AUTO_TEST_CASE(SerializeResize) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - ResizeLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, desc); - deserializedNetwork->Accept(verifier); + LayerVerifierBaseWithDescriptor verifier(layerName, {inputInfo}, {outputInfo}, desc); + deserializedNetwork->ExecuteStrategy(verifier); } class ResizeBilinearLayerVerifier : public LayerVerifierBaseWithDescriptor @@ -2398,25 +1755,36 @@ public: : LayerVerifierBaseWithDescriptor( layerName, inputInfos, outputInfos, descriptor) {} - void VisitResizeLayer(const armnn::IConnectableLayer* layer, - const armnn::ResizeDescriptor& descriptor, - const char* name) override - { - VerifyNameAndConnections(layer, name); - - BOOST_CHECK(descriptor.m_Method == armnn::ResizeMethod::Bilinear); - BOOST_CHECK(descriptor.m_TargetWidth == m_Descriptor.m_TargetWidth); - BOOST_CHECK(descriptor.m_TargetHeight == m_Descriptor.m_TargetHeight); - BOOST_CHECK(descriptor.m_DataLayout == m_Descriptor.m_DataLayout); - BOOST_CHECK(descriptor.m_AlignCorners == m_Descriptor.m_AlignCorners); - BOOST_CHECK(descriptor.m_HalfPixelCenters == m_Descriptor.m_HalfPixelCenters); - } - - void VisitResizeBilinearLayer(const armnn::IConnectableLayer*, - const armnn::ResizeBilinearDescriptor&, - const char*) override + void ExecuteStrategy(const armnn::IConnectableLayer* layer, + const armnn::BaseDescriptor& descriptor, + const std::vector& constants, + 
const char* name, + const armnn::LayerBindingId id = 0) override { - throw armnn::Exception("ResizeBilinearLayer should have translated to ResizeLayer"); + armnn::IgnoreUnused(descriptor, constants, id); + switch (layer->GetType()) + { + case armnn::LayerType::Input: break; + case armnn::LayerType::Output: break; + case armnn::LayerType::Resize: + { + VerifyNameAndConnections(layer, name); + const armnn::ResizeDescriptor& layerDescriptor = + static_cast(descriptor); + BOOST_CHECK(layerDescriptor.m_Method == armnn::ResizeMethod::Bilinear); + BOOST_CHECK(layerDescriptor.m_TargetWidth == m_Descriptor.m_TargetWidth); + BOOST_CHECK(layerDescriptor.m_TargetHeight == m_Descriptor.m_TargetHeight); + BOOST_CHECK(layerDescriptor.m_DataLayout == m_Descriptor.m_DataLayout); + BOOST_CHECK(layerDescriptor.m_AlignCorners == m_Descriptor.m_AlignCorners); + BOOST_CHECK(layerDescriptor.m_HalfPixelCenters == m_Descriptor.m_HalfPixelCenters); + break; + } + default: + { + throw armnn::Exception("Unexpected layer type in test model. ResizeBiliniar " + "should have translated to Resize"); + } + } } }; @@ -2452,7 +1820,7 @@ BOOST_AUTO_TEST_CASE(SerializeResizeBilinear) BOOST_CHECK(deserializedNetwork); ResizeBilinearLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, desc); - deserializedNetwork->Accept(verifier); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(EnsureResizeBilinearBackwardCompatibility) @@ -2508,13 +1876,11 @@ BOOST_AUTO_TEST_CASE(EnsureResizeBilinearBackwardCompatibility) descriptor.m_TargetHeight = 2u; ResizeBilinearLayerVerifier verifier("resizeBilinear", { inputInfo }, { outputInfo }, descriptor); - deserializedNetwork->Accept(verifier); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeSlice) { - DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Slice) - const std::string layerName{"slice"}; const armnn::TensorInfo inputInfo = armnn::TensorInfo({3, 2, 3, 1}, armnn::DataType::Float32); @@ -2537,14 +1903,12 @@ BOOST_AUTO_TEST_CASE(SerializeSlice) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - SliceLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, descriptor); - deserializedNetwork->Accept(verifier); + LayerVerifierBaseWithDescriptor verifier(layerName, {inputInfo}, {outputInfo}, descriptor); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeSoftmax) { - DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Softmax) - const std::string layerName("softmax"); const armnn::TensorInfo info({1, 10}, armnn::DataType::Float32); @@ -2565,14 +1929,12 @@ BOOST_AUTO_TEST_CASE(SerializeSoftmax) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - SoftmaxLayerVerifier verifier(layerName, {info}, {info}, descriptor); - deserializedNetwork->Accept(verifier); + LayerVerifierBaseWithDescriptor verifier(layerName, {info}, {info}, descriptor); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeSpaceToBatchNd) { - DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(SpaceToBatchNd) - const std::string layerName("spaceToBatchNd"); const armnn::TensorInfo inputInfo({2, 1, 2, 4}, armnn::DataType::Float32); const armnn::TensorInfo outputInfo({8, 1, 1, 3}, armnn::DataType::Float32); @@ -2596,14 +1958,13 @@ BOOST_AUTO_TEST_CASE(SerializeSpaceToBatchNd) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); 
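// Several of the surrounding hunks (Convolution2d, DepthwiseConvolution2d, DetectionPostProcess,
// FullyConnected, TransposeConvolution2d) no longer keep m_Weights / m_Biases members in
// per-layer verifiers; they pass the expected tensors to LayerVerifierBaseWithDescriptorAndConstants
// instead. As with the other helpers, its definition lives in SerializerTestUtils.hpp and is not
// shown here; the sketch below only illustrates the behaviour those call sites assume, with
// inferred template parameters and member names.
template <typename Descriptor>
class LayerVerifierBaseWithDescriptorAndConstants : public LayerVerifierBaseWithDescriptor<Descriptor>
{
public:
    LayerVerifierBaseWithDescriptorAndConstants(const std::string& layerName,
                                                const std::vector<armnn::TensorInfo>& inputInfos,
                                                const std::vector<armnn::TensorInfo>& outputInfos,
                                                const Descriptor& descriptor,
                                                const std::vector<armnn::ConstTensor>& constants)
        : LayerVerifierBaseWithDescriptor<Descriptor>(layerName, inputInfos, outputInfos, descriptor)
        , m_Constants(constants) {}

    void ExecuteStrategy(const armnn::IConnectableLayer* layer,
                         const armnn::BaseDescriptor& descriptor,
                         const std::vector<armnn::ConstTensor>& constants,
                         const char* name,
                         const armnn::LayerBindingId id = 0) override
    {
        armnn::IgnoreUnused(id);
        switch (layer->GetType())
        {
            case armnn::LayerType::Input:  break;
            case armnn::LayerType::Output: break;
            default:
            {
                this->VerifyNameAndConnections(layer, name);
                this->VerifyDescriptor(static_cast<const Descriptor&>(descriptor));
                // Weights and biases now arrive through the unified 'constants' argument rather
                // than through dedicated Visit*Layer parameters, so compare them positionally.
                for (std::size_t i = 0; i < constants.size() && i < m_Constants.size(); ++i)
                {
                    CompareConstTensor(constants[i], m_Constants[i]);
                }
            }
        }
    }

private:
    std::vector<armnn::ConstTensor> m_Constants;
};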
BOOST_CHECK(deserializedNetwork); - SpaceToBatchNdLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, desc); - deserializedNetwork->Accept(verifier); + LayerVerifierBaseWithDescriptor verifier( + layerName, {inputInfo}, {outputInfo}, desc); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeSpaceToDepth) { - DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(SpaceToDepth) - const std::string layerName("spaceToDepth"); const armnn::TensorInfo inputInfo ({ 1, 16, 8, 3 }, armnn::DataType::Float32); @@ -2627,14 +1988,13 @@ BOOST_AUTO_TEST_CASE(SerializeSpaceToDepth) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - SpaceToDepthLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, desc); - deserializedNetwork->Accept(verifier); + LayerVerifierBaseWithDescriptor verifier( + layerName, {inputInfo}, {outputInfo}, desc); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeSplitter) { - DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Splitter) - const unsigned int numViews = 3; const unsigned int numDimensions = 4; const unsigned int inputShape[] = {1, 18, 4, 4}; @@ -2682,14 +2042,13 @@ BOOST_AUTO_TEST_CASE(SerializeSplitter) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - SplitterLayerVerifier verifier(layerName, {inputInfo}, {outputInfo, outputInfo, outputInfo}, desc); - deserializedNetwork->Accept(verifier); + LayerVerifierBaseWithDescriptor verifier( + layerName, {inputInfo}, {outputInfo, outputInfo, outputInfo}, desc); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeStack) { - DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Stack) - const std::string layerName("stack"); armnn::TensorInfo inputTensorInfo ({4, 3, 5}, armnn::DataType::Float32); @@ -2714,14 +2073,13 @@ BOOST_AUTO_TEST_CASE(SerializeStack) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - StackLayerVerifier verifier(layerName, {inputTensorInfo, inputTensorInfo}, {outputTensorInfo}, descriptor); - deserializedNetwork->Accept(verifier); + LayerVerifierBaseWithDescriptor verifier( + layerName, {inputTensorInfo, inputTensorInfo}, {outputTensorInfo}, descriptor); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeStandIn) { - DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(StandIn) - const std::string layerName("standIn"); armnn::TensorInfo tensorInfo({ 1u }, armnn::DataType::Float32); @@ -2749,14 +2107,13 @@ BOOST_AUTO_TEST_CASE(SerializeStandIn) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - StandInLayerVerifier verifier(layerName, { tensorInfo, tensorInfo }, { tensorInfo, tensorInfo }, descriptor); - deserializedNetwork->Accept(verifier); + LayerVerifierBaseWithDescriptor verifier( + layerName, { tensorInfo, tensorInfo }, { tensorInfo, tensorInfo }, descriptor); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeStridedSlice) { - DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(StridedSlice) - const std::string layerName("stridedSlice"); const armnn::TensorInfo inputInfo = armnn::TensorInfo({3, 2, 3, 1}, armnn::DataType::Float32); const armnn::TensorInfo outputInfo = armnn::TensorInfo({3, 1}, armnn::DataType::Float32); @@ -2780,14 +2137,13 @@ 
BOOST_AUTO_TEST_CASE(SerializeStridedSlice) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - StridedSliceLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, desc); - deserializedNetwork->Accept(verifier); + LayerVerifierBaseWithDescriptor verifier( + layerName, {inputInfo}, {outputInfo}, desc); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeSubtraction) { - DECLARE_LAYER_VERIFIER_CLASS(Subtraction) - const std::string layerName("subtraction"); const armnn::TensorInfo info({ 1, 4 }, armnn::DataType::Float32); @@ -2808,8 +2164,8 @@ BOOST_AUTO_TEST_CASE(SerializeSubtraction) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - SubtractionLayerVerifier verifier(layerName, {info, info}, {info}); - deserializedNetwork->Accept(verifier); + LayerVerifierBase verifier(layerName, {info, info}, {info}); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeSwitch) @@ -2820,16 +2176,31 @@ BOOST_AUTO_TEST_CASE(SerializeSwitch) SwitchLayerVerifier(const std::string& layerName, const std::vector& inputInfos, const std::vector& outputInfos) - : LayerVerifierBase(layerName, inputInfos, outputInfos) {} + : LayerVerifierBase(layerName, inputInfos, outputInfos) {} - void VisitSwitchLayer(const armnn::IConnectableLayer* layer, const char* name) override + void ExecuteStrategy(const armnn::IConnectableLayer* layer, + const armnn::BaseDescriptor& descriptor, + const std::vector& constants, + const char* name, + const armnn::LayerBindingId id = 0) override { - VerifyNameAndConnections(layer, name); + armnn::IgnoreUnused(descriptor, constants, id); + switch (layer->GetType()) + { + case armnn::LayerType::Input: break; + case armnn::LayerType::Output: break; + case armnn::LayerType::Constant: break; + case armnn::LayerType::Switch: + { + VerifyNameAndConnections(layer, name); + break; + } + default: + { + throw armnn::Exception("Unexpected layer type in Switch test model"); + } + } } - - void VisitConstantLayer(const armnn::IConnectableLayer*, - const armnn::ConstTensor&, - const char*) override {} }; const std::string layerName("switch"); @@ -2859,13 +2230,11 @@ BOOST_AUTO_TEST_CASE(SerializeSwitch) BOOST_CHECK(deserializedNetwork); SwitchLayerVerifier verifier(layerName, {info, info}, {info, info}); - deserializedNetwork->Accept(verifier); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeTranspose) { - DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Transpose) - const std::string layerName("transpose"); const armnn::TensorInfo inputTensorInfo({4, 3, 2, 1}, armnn::DataType::Float32); const armnn::TensorInfo outputTensorInfo({1, 2, 3, 4}, armnn::DataType::Float32); @@ -2886,54 +2255,13 @@ BOOST_AUTO_TEST_CASE(SerializeTranspose) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - TransposeLayerVerifier verifier(layerName, {inputTensorInfo}, {outputTensorInfo}, descriptor); - deserializedNetwork->Accept(verifier); + LayerVerifierBaseWithDescriptor verifier( + layerName, {inputTensorInfo}, {outputTensorInfo}, descriptor); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeTransposeConvolution2d) { - using Descriptor = armnn::TransposeConvolution2dDescriptor; - class TransposeConvolution2dLayerVerifier : public LayerVerifierBaseWithDescriptor - { - public: - 
TransposeConvolution2dLayerVerifier(const std::string& layerName, - const std::vector& inputInfos, - const std::vector& outputInfos, - const Descriptor& descriptor, - const armnn::ConstTensor& weights, - const armnn::Optional& biases) - : LayerVerifierBaseWithDescriptor(layerName, inputInfos, outputInfos, descriptor) - , m_Weights(weights) - , m_Biases(biases) - {} - - void VisitTransposeConvolution2dLayer(const armnn::IConnectableLayer* layer, - const Descriptor& descriptor, - const armnn::ConstTensor& weights, - const armnn::Optional& biases, - const char* name) override - { - VerifyNameAndConnections(layer, name); - VerifyDescriptor(descriptor); - - // check weights - CompareConstTensor(weights, m_Weights); - - // check biases - BOOST_CHECK(biases.has_value() == descriptor.m_BiasEnabled); - BOOST_CHECK(biases.has_value() == m_Biases.has_value()); - - if (biases.has_value() && m_Biases.has_value()) - { - CompareConstTensor(biases.value(), m_Biases.value()); - } - } - - private: - armnn::ConstTensor m_Weights; - armnn::Optional m_Biases; - }; - const std::string layerName("transposeConvolution2d"); const armnn::TensorInfo inputInfo ({ 1, 7, 7, 1 }, armnn::DataType::Float32); const armnn::TensorInfo outputInfo({ 1, 9, 9, 1 }, armnn::DataType::Float32); @@ -2975,8 +2303,10 @@ BOOST_AUTO_TEST_CASE(SerializeTransposeConvolution2d) armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); BOOST_CHECK(deserializedNetwork); - TransposeConvolution2dLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, descriptor, weights, biases); - deserializedNetwork->Accept(verifier); + const std::vector constants {weights, biases}; + LayerVerifierBaseWithDescriptorAndConstants verifier( + layerName, {inputInfo}, {outputInfo}, descriptor, constants); + deserializedNetwork->ExecuteStrategy(verifier); } BOOST_AUTO_TEST_CASE(SerializeDeserializeNonLinearNetwork) @@ -2991,16 +2321,31 @@ BOOST_AUTO_TEST_CASE(SerializeDeserializeNonLinearNetwork) : LayerVerifierBase(layerName, inputInfos, outputInfos) , m_LayerInput(layerInput) {} - void VisitConstantLayer(const armnn::IConnectableLayer* layer, - const armnn::ConstTensor& input, - const char* name) override + void ExecuteStrategy(const armnn::IConnectableLayer* layer, + const armnn::BaseDescriptor& descriptor, + const std::vector& constants, + const char* name, + const armnn::LayerBindingId id = 0) override { - VerifyNameAndConnections(layer, name); - CompareConstTensor(input, m_LayerInput); + armnn::IgnoreUnused(descriptor, constants, id); + switch (layer->GetType()) + { + case armnn::LayerType::Input: break; + case armnn::LayerType::Output: break; + case armnn::LayerType::Addition: break; + case armnn::LayerType::Constant: + { + VerifyNameAndConnections(layer, name); + CompareConstTensor(constants.at(0), m_LayerInput); + break; + } + default: + { + throw armnn::Exception("Unexpected layer type in test model"); + } + } } - void VisitAdditionLayer(const armnn::IConnectableLayer*, const char*) override {} - private: armnn::ConstTensor m_LayerInput; }; @@ -3029,2125 +2374,7 @@ BOOST_AUTO_TEST_CASE(SerializeDeserializeNonLinearNetwork) BOOST_CHECK(deserializedNetwork); ConstantLayerVerifier verifier(layerName, {}, {info}, constTensor); - deserializedNetwork->Accept(verifier); -} - -class VerifyLstmLayer : public LayerVerifierBaseWithDescriptor -{ -public: - VerifyLstmLayer(const std::string& layerName, - const std::vector& inputInfos, - const std::vector& outputInfos, - const armnn::LstmDescriptor& descriptor, - const 
armnn::LstmInputParams& inputParams)
-        : LayerVerifierBaseWithDescriptor<armnn::LstmDescriptor>(layerName, inputInfos, outputInfos, descriptor)
-        , m_InputParams(inputParams) {}
-
-    void VisitLstmLayer(const armnn::IConnectableLayer* layer,
-                        const armnn::LstmDescriptor& descriptor,
-                        const armnn::LstmInputParams& params,
-                        const char* name)
-    {
-        VerifyNameAndConnections(layer, name);
-        VerifyDescriptor(descriptor);
-        VerifyInputParameters(params);
-    }
-
-protected:
-    void VerifyInputParameters(const armnn::LstmInputParams& params)
-    {
-        VerifyConstTensors(
-            "m_InputToInputWeights", m_InputParams.m_InputToInputWeights, params.m_InputToInputWeights);
-        VerifyConstTensors(
-            "m_InputToForgetWeights", m_InputParams.m_InputToForgetWeights, params.m_InputToForgetWeights);
-        VerifyConstTensors(
-            "m_InputToCellWeights", m_InputParams.m_InputToCellWeights, params.m_InputToCellWeights);
-        VerifyConstTensors(
-            "m_InputToOutputWeights", m_InputParams.m_InputToOutputWeights, params.m_InputToOutputWeights);
-        VerifyConstTensors(
-            "m_RecurrentToInputWeights", m_InputParams.m_RecurrentToInputWeights, params.m_RecurrentToInputWeights);
-        VerifyConstTensors(
-            "m_RecurrentToForgetWeights", m_InputParams.m_RecurrentToForgetWeights, params.m_RecurrentToForgetWeights);
-        VerifyConstTensors(
-            "m_RecurrentToCellWeights", m_InputParams.m_RecurrentToCellWeights, params.m_RecurrentToCellWeights);
-        VerifyConstTensors(
-            "m_RecurrentToOutputWeights", m_InputParams.m_RecurrentToOutputWeights, params.m_RecurrentToOutputWeights);
-        VerifyConstTensors(
-            "m_CellToInputWeights", m_InputParams.m_CellToInputWeights, params.m_CellToInputWeights);
-        VerifyConstTensors(
-            "m_CellToForgetWeights", m_InputParams.m_CellToForgetWeights, params.m_CellToForgetWeights);
-        VerifyConstTensors(
-            "m_CellToOutputWeights", m_InputParams.m_CellToOutputWeights, params.m_CellToOutputWeights);
-        VerifyConstTensors(
-            "m_InputGateBias", m_InputParams.m_InputGateBias, params.m_InputGateBias);
-        VerifyConstTensors(
-            "m_ForgetGateBias", m_InputParams.m_ForgetGateBias, params.m_ForgetGateBias);
-        VerifyConstTensors(
-            "m_CellBias", m_InputParams.m_CellBias, params.m_CellBias);
-        VerifyConstTensors(
-            "m_OutputGateBias", m_InputParams.m_OutputGateBias, params.m_OutputGateBias);
-        VerifyConstTensors(
-            "m_ProjectionWeights", m_InputParams.m_ProjectionWeights, params.m_ProjectionWeights);
-        VerifyConstTensors(
-            "m_ProjectionBias", m_InputParams.m_ProjectionBias, params.m_ProjectionBias);
-        VerifyConstTensors(
-            "m_InputLayerNormWeights", m_InputParams.m_InputLayerNormWeights, params.m_InputLayerNormWeights);
-        VerifyConstTensors(
-            "m_ForgetLayerNormWeights", m_InputParams.m_ForgetLayerNormWeights, params.m_ForgetLayerNormWeights);
-        VerifyConstTensors(
-            "m_CellLayerNormWeights", m_InputParams.m_CellLayerNormWeights, params.m_CellLayerNormWeights);
-        VerifyConstTensors(
-            "m_OutputLayerNormWeights", m_InputParams.m_OutputLayerNormWeights, params.m_OutputLayerNormWeights);
-    }
-
-private:
-    armnn::LstmInputParams m_InputParams;
-};
-
-BOOST_AUTO_TEST_CASE(SerializeDeserializeLstmCifgPeepholeNoProjection)
-{
-    armnn::LstmDescriptor descriptor;
-    descriptor.m_ActivationFunc = 4;
-    descriptor.m_ClippingThresProj = 0.0f;
-    descriptor.m_ClippingThresCell = 0.0f;
-    descriptor.m_CifgEnabled = true; // if this is true then we DON'T need to set the OptCifgParams
-    descriptor.m_ProjectionEnabled = false;
-    descriptor.m_PeepholeEnabled = true;
-
-    const uint32_t batchSize = 1;
-    const uint32_t inputSize = 2;
-    const uint32_t numUnits = 4;
-    const uint32_t outputSize = numUnits;
-
-    armnn::TensorInfo inputWeightsInfo1({numUnits, inputSize}, armnn::DataType::Float32);
-    std::vector<float> inputToForgetWeightsData = GenerateRandomData<float>(inputWeightsInfo1.GetNumElements());
-    armnn::ConstTensor inputToForgetWeights(inputWeightsInfo1, inputToForgetWeightsData);
-
-    std::vector<float> inputToCellWeightsData = GenerateRandomData<float>(inputWeightsInfo1.GetNumElements());
-    armnn::ConstTensor inputToCellWeights(inputWeightsInfo1, inputToCellWeightsData);
-
-    std::vector<float> inputToOutputWeightsData = GenerateRandomData<float>(inputWeightsInfo1.GetNumElements());
-    armnn::ConstTensor inputToOutputWeights(inputWeightsInfo1, inputToOutputWeightsData);
-
-    armnn::TensorInfo inputWeightsInfo2({numUnits, outputSize}, armnn::DataType::Float32);
-    std::vector<float> recurrentToForgetWeightsData = GenerateRandomData<float>(inputWeightsInfo2.GetNumElements());
-    armnn::ConstTensor recurrentToForgetWeights(inputWeightsInfo2, recurrentToForgetWeightsData);
-
-    std::vector<float> recurrentToCellWeightsData = GenerateRandomData<float>(inputWeightsInfo2.GetNumElements());
-    armnn::ConstTensor recurrentToCellWeights(inputWeightsInfo2, recurrentToCellWeightsData);
-
-    std::vector<float> recurrentToOutputWeightsData = GenerateRandomData<float>(inputWeightsInfo2.GetNumElements());
-    armnn::ConstTensor recurrentToOutputWeights(inputWeightsInfo2, recurrentToOutputWeightsData);
-
-    armnn::TensorInfo inputWeightsInfo3({numUnits}, armnn::DataType::Float32);
-    std::vector<float> cellToForgetWeightsData = GenerateRandomData<float>(inputWeightsInfo3.GetNumElements());
-    armnn::ConstTensor cellToForgetWeights(inputWeightsInfo3, cellToForgetWeightsData);
-
-    std::vector<float> cellToOutputWeightsData = GenerateRandomData<float>(inputWeightsInfo3.GetNumElements());
-    armnn::ConstTensor cellToOutputWeights(inputWeightsInfo3, cellToOutputWeightsData);
-
-    std::vector<float> forgetGateBiasData(numUnits, 1.0f);
-    armnn::ConstTensor forgetGateBias(inputWeightsInfo3, forgetGateBiasData);
-
-    std::vector<float> cellBiasData(numUnits, 0.0f);
-    armnn::ConstTensor cellBias(inputWeightsInfo3, cellBiasData);
-
-    std::vector<float> outputGateBiasData(numUnits, 0.0f);
-    armnn::ConstTensor outputGateBias(inputWeightsInfo3, outputGateBiasData);
-
-    armnn::LstmInputParams params;
-    params.m_InputToForgetWeights = &inputToForgetWeights;
-    params.m_InputToCellWeights = &inputToCellWeights;
-    params.m_InputToOutputWeights = &inputToOutputWeights;
-    params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
-    params.m_RecurrentToCellWeights = &recurrentToCellWeights;
-    params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
-    params.m_ForgetGateBias = &forgetGateBias;
-    params.m_CellBias = &cellBias;
-    params.m_OutputGateBias = &outputGateBias;
-    params.m_CellToForgetWeights = &cellToForgetWeights;
-    params.m_CellToOutputWeights = &cellToOutputWeights;
-
-    armnn::INetworkPtr network = armnn::INetwork::Create();
-    armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
-    armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(1);
-    armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(2);
-    const std::string layerName("lstm");
-    armnn::IConnectableLayer* const lstmLayer = network->AddLstmLayer(descriptor, params, layerName.c_str());
-    armnn::IConnectableLayer* const scratchBuffer = network->AddOutputLayer(0);
-    armnn::IConnectableLayer* const outputStateOut = network->AddOutputLayer(1);
-    armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(2);
-    armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(3);
-
-    // connect up
-    armnn::TensorInfo inputTensorInfo({ batchSize, inputSize }, armnn::DataType::Float32);
-    armnn::TensorInfo cellStateTensorInfo({ batchSize, numUnits}, armnn::DataType::Float32);
-    armnn::TensorInfo outputStateTensorInfo({ batchSize, outputSize }, armnn::DataType::Float32);
-    armnn::TensorInfo lstmTensorInfoScratchBuff({ batchSize, numUnits * 3 }, armnn::DataType::Float32);
-
-    inputLayer->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(0));
-    inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
-
-    outputStateIn->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(1));
-    outputStateIn->GetOutputSlot(0).SetTensorInfo(outputStateTensorInfo);
-
-    cellStateIn->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(2));
-    cellStateIn->GetOutputSlot(0).SetTensorInfo(cellStateTensorInfo);
-
-    lstmLayer->GetOutputSlot(0).Connect(scratchBuffer->GetInputSlot(0));
-    lstmLayer->GetOutputSlot(0).SetTensorInfo(lstmTensorInfoScratchBuff);
-
-    lstmLayer->GetOutputSlot(1).Connect(outputStateOut->GetInputSlot(0));
-    lstmLayer->GetOutputSlot(1).SetTensorInfo(outputStateTensorInfo);
-
-    lstmLayer->GetOutputSlot(2).Connect(cellStateOut->GetInputSlot(0));
-    lstmLayer->GetOutputSlot(2).SetTensorInfo(cellStateTensorInfo);
-
-    lstmLayer->GetOutputSlot(3).Connect(outputLayer->GetInputSlot(0));
-    lstmLayer->GetOutputSlot(3).SetTensorInfo(outputStateTensorInfo);
-
-    armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
-    BOOST_CHECK(deserializedNetwork);
-
-    VerifyLstmLayer checker(
-        layerName,
-        {inputTensorInfo, outputStateTensorInfo, cellStateTensorInfo},
-        {lstmTensorInfoScratchBuff, outputStateTensorInfo, cellStateTensorInfo, outputStateTensorInfo},
-        descriptor,
-        params);
-    deserializedNetwork->Accept(checker);
-}
-
-BOOST_AUTO_TEST_CASE(SerializeDeserializeLstmNoCifgWithPeepholeAndProjection)
-{
-    armnn::LstmDescriptor descriptor;
-    descriptor.m_ActivationFunc = 4;
-    descriptor.m_ClippingThresProj = 0.0f;
-    descriptor.m_ClippingThresCell = 0.0f;
-    descriptor.m_CifgEnabled = false; // if this is true then we DON'T need to set the OptCifgParams
-    descriptor.m_ProjectionEnabled = true;
-    descriptor.m_PeepholeEnabled = true;
-
-    const uint32_t batchSize = 2;
-    const uint32_t inputSize = 5;
-    const uint32_t numUnits = 20;
-    const uint32_t outputSize = 16;
-
-    armnn::TensorInfo tensorInfo20x5({numUnits, inputSize}, armnn::DataType::Float32);
-    std::vector<float> inputToInputWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
-    armnn::ConstTensor inputToInputWeights(tensorInfo20x5, inputToInputWeightsData);
-
-    std::vector<float> inputToForgetWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
-    armnn::ConstTensor inputToForgetWeights(tensorInfo20x5, inputToForgetWeightsData);
-
-    std::vector<float> inputToCellWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
-    armnn::ConstTensor inputToCellWeights(tensorInfo20x5, inputToCellWeightsData);
-
-    std::vector<float> inputToOutputWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
-    armnn::ConstTensor inputToOutputWeights(tensorInfo20x5, inputToOutputWeightsData);
-
-    armnn::TensorInfo tensorInfo20({numUnits}, armnn::DataType::Float32);
-    std::vector<float> inputGateBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
-    armnn::ConstTensor inputGateBias(tensorInfo20, inputGateBiasData);
-
-    std::vector<float> forgetGateBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
-    armnn::ConstTensor forgetGateBias(tensorInfo20, forgetGateBiasData);
-
-    std::vector<float> cellBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
-    armnn::ConstTensor cellBias(tensorInfo20, cellBiasData);
-
-    std::vector<float> outputGateBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
-    armnn::ConstTensor outputGateBias(tensorInfo20, outputGateBiasData);
-
-    armnn::TensorInfo tensorInfo20x16({numUnits, outputSize}, armnn::DataType::Float32);
-    std::vector<float> recurrentToInputWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
-    armnn::ConstTensor recurrentToInputWeights(tensorInfo20x16, recurrentToInputWeightsData);
-
-    std::vector<float> recurrentToForgetWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
-    armnn::ConstTensor recurrentToForgetWeights(tensorInfo20x16, recurrentToForgetWeightsData);
-
-    std::vector<float> recurrentToCellWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
-    armnn::ConstTensor recurrentToCellWeights(tensorInfo20x16, recurrentToCellWeightsData);
-
-    std::vector<float> recurrentToOutputWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
-    armnn::ConstTensor recurrentToOutputWeights(tensorInfo20x16, recurrentToOutputWeightsData);
-
-    std::vector<float> cellToInputWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
-    armnn::ConstTensor cellToInputWeights(tensorInfo20, cellToInputWeightsData);
-
-    std::vector<float> cellToForgetWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
-    armnn::ConstTensor cellToForgetWeights(tensorInfo20, cellToForgetWeightsData);
-
-    std::vector<float> cellToOutputWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
-    armnn::ConstTensor cellToOutputWeights(tensorInfo20, cellToOutputWeightsData);
-
-    armnn::TensorInfo tensorInfo16x20({outputSize, numUnits}, armnn::DataType::Float32);
-    std::vector<float> projectionWeightsData = GenerateRandomData<float>(tensorInfo16x20.GetNumElements());
-    armnn::ConstTensor projectionWeights(tensorInfo16x20, projectionWeightsData);
-
-    armnn::TensorInfo tensorInfo16({outputSize}, armnn::DataType::Float32);
-    std::vector<float> projectionBiasData(outputSize, 0.f);
-    armnn::ConstTensor projectionBias(tensorInfo16, projectionBiasData);
-
-    armnn::LstmInputParams params;
-    params.m_InputToForgetWeights = &inputToForgetWeights;
-    params.m_InputToCellWeights = &inputToCellWeights;
-    params.m_InputToOutputWeights = &inputToOutputWeights;
-    params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
-    params.m_RecurrentToCellWeights = &recurrentToCellWeights;
-    params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
-    params.m_ForgetGateBias = &forgetGateBias;
-    params.m_CellBias = &cellBias;
-    params.m_OutputGateBias = &outputGateBias;
-
-    // additional params because: descriptor.m_CifgEnabled = false
-    params.m_InputToInputWeights = &inputToInputWeights;
-    params.m_RecurrentToInputWeights = &recurrentToInputWeights;
-    params.m_CellToInputWeights = &cellToInputWeights;
-    params.m_InputGateBias = &inputGateBias;
-
-    // additional params because: descriptor.m_ProjectionEnabled = true
-    params.m_ProjectionWeights = &projectionWeights;
-    params.m_ProjectionBias = &projectionBias;
-
-    // additional params because: descriptor.m_PeepholeEnabled = true
-    params.m_CellToForgetWeights = &cellToForgetWeights;
-    params.m_CellToOutputWeights = &cellToOutputWeights;
-
-    armnn::INetworkPtr network = armnn::INetwork::Create();
-    armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
-    armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(1);
-    armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(2);
-    const std::string layerName("lstm");
-    armnn::IConnectableLayer* const lstmLayer = network->AddLstmLayer(descriptor, params, layerName.c_str());
-    armnn::IConnectableLayer* const scratchBuffer = network->AddOutputLayer(0);
-    armnn::IConnectableLayer* const outputStateOut = network->AddOutputLayer(1);
-    armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(2);
-    armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(3);
-
-    // connect up
-    armnn::TensorInfo inputTensorInfo({ batchSize, inputSize }, armnn::DataType::Float32);
-    armnn::TensorInfo cellStateTensorInfo({ batchSize, numUnits}, armnn::DataType::Float32);
-    armnn::TensorInfo outputStateTensorInfo({ batchSize, outputSize }, armnn::DataType::Float32);
-    armnn::TensorInfo lstmTensorInfoScratchBuff({ batchSize, numUnits * 4 }, armnn::DataType::Float32);
-
-    inputLayer->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(0));
-    inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
-
-    outputStateIn->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(1));
-    outputStateIn->GetOutputSlot(0).SetTensorInfo(outputStateTensorInfo);
-
-    cellStateIn->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(2));
-    cellStateIn->GetOutputSlot(0).SetTensorInfo(cellStateTensorInfo);
-
-    lstmLayer->GetOutputSlot(0).Connect(scratchBuffer->GetInputSlot(0));
-    lstmLayer->GetOutputSlot(0).SetTensorInfo(lstmTensorInfoScratchBuff);
-
-    lstmLayer->GetOutputSlot(1).Connect(outputStateOut->GetInputSlot(0));
-    lstmLayer->GetOutputSlot(1).SetTensorInfo(outputStateTensorInfo);
-
-    lstmLayer->GetOutputSlot(2).Connect(cellStateOut->GetInputSlot(0));
-    lstmLayer->GetOutputSlot(2).SetTensorInfo(cellStateTensorInfo);
-
-    lstmLayer->GetOutputSlot(3).Connect(outputLayer->GetInputSlot(0));
-    lstmLayer->GetOutputSlot(3).SetTensorInfo(outputStateTensorInfo);
-
-    armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
-    BOOST_CHECK(deserializedNetwork);
-
-    VerifyLstmLayer checker(
-        layerName,
-        {inputTensorInfo, outputStateTensorInfo, cellStateTensorInfo},
-        {lstmTensorInfoScratchBuff, outputStateTensorInfo, cellStateTensorInfo, outputStateTensorInfo},
-        descriptor,
-        params);
-    deserializedNetwork->Accept(checker);
-}
-
-BOOST_AUTO_TEST_CASE(SerializeDeserializeLstmNoCifgWithPeepholeWithProjectionWithLayerNorm)
-{
-    armnn::LstmDescriptor descriptor;
-    descriptor.m_ActivationFunc = 4;
-    descriptor.m_ClippingThresProj = 0.0f;
-    descriptor.m_ClippingThresCell = 0.0f;
-    descriptor.m_CifgEnabled = false; // if this is true then we DON'T need to set the OptCifgParams
-    descriptor.m_ProjectionEnabled = true;
-    descriptor.m_PeepholeEnabled = true;
-    descriptor.m_LayerNormEnabled = true;
-
-    const uint32_t batchSize = 2;
-    const uint32_t inputSize = 5;
-    const uint32_t numUnits = 20;
-    const uint32_t outputSize = 16;
-
-    armnn::TensorInfo tensorInfo20x5({numUnits, inputSize}, armnn::DataType::Float32);
-    std::vector<float> inputToInputWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
-    armnn::ConstTensor inputToInputWeights(tensorInfo20x5, inputToInputWeightsData);
-
-    std::vector<float> inputToForgetWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
-    armnn::ConstTensor inputToForgetWeights(tensorInfo20x5, inputToForgetWeightsData);
-
-    std::vector<float> inputToCellWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
-    armnn::ConstTensor inputToCellWeights(tensorInfo20x5, inputToCellWeightsData);
-
-    std::vector<float> inputToOutputWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
-    armnn::ConstTensor inputToOutputWeights(tensorInfo20x5, inputToOutputWeightsData);
-
-    armnn::TensorInfo tensorInfo20({numUnits}, armnn::DataType::Float32);
-    std::vector<float> inputGateBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
-    armnn::ConstTensor inputGateBias(tensorInfo20, inputGateBiasData);
-
-    std::vector<float> forgetGateBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
-    armnn::ConstTensor forgetGateBias(tensorInfo20, forgetGateBiasData);
-
-    std::vector<float> cellBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
-    armnn::ConstTensor cellBias(tensorInfo20, cellBiasData);
-
-    std::vector<float> outputGateBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
-    armnn::ConstTensor outputGateBias(tensorInfo20, outputGateBiasData);
-
-    armnn::TensorInfo tensorInfo20x16({numUnits, outputSize}, armnn::DataType::Float32);
-    std::vector<float> recurrentToInputWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
-    armnn::ConstTensor recurrentToInputWeights(tensorInfo20x16, recurrentToInputWeightsData);
-
-    std::vector<float> recurrentToForgetWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
-    armnn::ConstTensor recurrentToForgetWeights(tensorInfo20x16, recurrentToForgetWeightsData);
-
-    std::vector<float> recurrentToCellWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
-    armnn::ConstTensor recurrentToCellWeights(tensorInfo20x16, recurrentToCellWeightsData);
-
-    std::vector<float> recurrentToOutputWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
-    armnn::ConstTensor recurrentToOutputWeights(tensorInfo20x16, recurrentToOutputWeightsData);
-
-    std::vector<float> cellToInputWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
-    armnn::ConstTensor cellToInputWeights(tensorInfo20, cellToInputWeightsData);
-
-    std::vector<float> cellToForgetWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
-    armnn::ConstTensor cellToForgetWeights(tensorInfo20, cellToForgetWeightsData);
-
-    std::vector<float> cellToOutputWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
-    armnn::ConstTensor cellToOutputWeights(tensorInfo20, cellToOutputWeightsData);
-
-    armnn::TensorInfo tensorInfo16x20({outputSize, numUnits}, armnn::DataType::Float32);
-    std::vector<float> projectionWeightsData = GenerateRandomData<float>(tensorInfo16x20.GetNumElements());
-    armnn::ConstTensor projectionWeights(tensorInfo16x20, projectionWeightsData);
-
-    armnn::TensorInfo tensorInfo16({outputSize}, armnn::DataType::Float32);
-    std::vector<float> projectionBiasData(outputSize, 0.f);
-    armnn::ConstTensor projectionBias(tensorInfo16, projectionBiasData);
-
-    std::vector<float> inputLayerNormWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
-    armnn::ConstTensor inputLayerNormWeights(tensorInfo20, forgetGateBiasData);
-
-    std::vector<float> forgetLayerNormWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
-    armnn::ConstTensor forgetLayerNormWeights(tensorInfo20, forgetGateBiasData);
-
-    std::vector<float> cellLayerNormWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
-    armnn::ConstTensor cellLayerNormWeights(tensorInfo20, forgetGateBiasData);
-
-    std::vector<float> outLayerNormWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
-    armnn::ConstTensor outLayerNormWeights(tensorInfo20, forgetGateBiasData);
-
-    armnn::LstmInputParams params;
-    params.m_InputToForgetWeights = &inputToForgetWeights;
-    params.m_InputToCellWeights = &inputToCellWeights;
-    params.m_InputToOutputWeights = &inputToOutputWeights;
-    params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
-    params.m_RecurrentToCellWeights = &recurrentToCellWeights;
-    params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
-    params.m_ForgetGateBias = &forgetGateBias;
-    params.m_CellBias = &cellBias;
-    params.m_OutputGateBias = &outputGateBias;
-
-    // additional params because: descriptor.m_CifgEnabled = false
-    params.m_InputToInputWeights = &inputToInputWeights;
-    params.m_RecurrentToInputWeights = &recurrentToInputWeights;
-    params.m_CellToInputWeights = &cellToInputWeights;
-    params.m_InputGateBias = &inputGateBias;
-
-    // additional params because: descriptor.m_ProjectionEnabled = true
-    params.m_ProjectionWeights = &projectionWeights;
-    params.m_ProjectionBias = &projectionBias;
-
-    // additional params because: descriptor.m_PeepholeEnabled = true
-    params.m_CellToForgetWeights = &cellToForgetWeights;
-    params.m_CellToOutputWeights = &cellToOutputWeights;
-
-    // additional params because: descriptor.m_LayerNormEnabled = true
-    params.m_InputLayerNormWeights = &inputLayerNormWeights;
-    params.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
-    params.m_CellLayerNormWeights = &cellLayerNormWeights;
-    params.m_OutputLayerNormWeights = &outLayerNormWeights;
-
-    armnn::INetworkPtr network = armnn::INetwork::Create();
-    armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
-    armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(1);
-    armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(2);
-    const std::string layerName("lstm");
-    armnn::IConnectableLayer* const lstmLayer = network->AddLstmLayer(descriptor, params, layerName.c_str());
-    armnn::IConnectableLayer* const scratchBuffer = network->AddOutputLayer(0);
-    armnn::IConnectableLayer* const outputStateOut = network->AddOutputLayer(1);
-    armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(2);
-    armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(3);
-
-    // connect up
-    armnn::TensorInfo inputTensorInfo({ batchSize, inputSize }, armnn::DataType::Float32);
-    armnn::TensorInfo cellStateTensorInfo({ batchSize, numUnits}, armnn::DataType::Float32);
-    armnn::TensorInfo outputStateTensorInfo({ batchSize, outputSize }, armnn::DataType::Float32);
-    armnn::TensorInfo lstmTensorInfoScratchBuff({ batchSize, numUnits * 4 }, armnn::DataType::Float32);
-
-    inputLayer->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(0));
-    inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
-
-    outputStateIn->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(1));
-    outputStateIn->GetOutputSlot(0).SetTensorInfo(outputStateTensorInfo);
-
-    cellStateIn->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(2));
-    cellStateIn->GetOutputSlot(0).SetTensorInfo(cellStateTensorInfo);
-
-    lstmLayer->GetOutputSlot(0).Connect(scratchBuffer->GetInputSlot(0));
-    lstmLayer->GetOutputSlot(0).SetTensorInfo(lstmTensorInfoScratchBuff);
-
-    lstmLayer->GetOutputSlot(1).Connect(outputStateOut->GetInputSlot(0));
-    lstmLayer->GetOutputSlot(1).SetTensorInfo(outputStateTensorInfo);
-
-    lstmLayer->GetOutputSlot(2).Connect(cellStateOut->GetInputSlot(0));
-    lstmLayer->GetOutputSlot(2).SetTensorInfo(cellStateTensorInfo);
-
-    lstmLayer->GetOutputSlot(3).Connect(outputLayer->GetInputSlot(0));
-    lstmLayer->GetOutputSlot(3).SetTensorInfo(outputStateTensorInfo);
-
-    armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
-    BOOST_CHECK(deserializedNetwork);
-
-    VerifyLstmLayer checker(
-        layerName,
-        {inputTensorInfo, outputStateTensorInfo,
cellStateTensorInfo}, - {lstmTensorInfoScratchBuff, outputStateTensorInfo, cellStateTensorInfo, outputStateTensorInfo}, - descriptor, - params); - deserializedNetwork->Accept(checker); -} - -BOOST_AUTO_TEST_CASE(EnsureLstmLayersBackwardCompatibility) -{ - // The hex data below is a flat buffer containing a lstm layer with no Cifg, with peephole and projection - // enabled. That data was obtained before additional layer normalization parameters where added to the - // lstm serializer. That way it can be tested if a lstm model with the old parameter configuration can - // still be loaded - const std::vector lstmNoCifgWithPeepholeAndProjectionModel = - { - 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00, - 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x2C, 0x00, 0x00, 0x00, 0x38, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, - 0xDC, 0x29, 0x00, 0x00, 0x38, 0x29, 0x00, 0x00, 0xB4, 0x28, 0x00, 0x00, 0x94, 0x01, 0x00, 0x00, 0x3C, 0x01, - 0x00, 0x00, 0xE0, 0x00, 0x00, 0x00, 0x84, 0x00, 0x00, 0x00, 0x28, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x04, 0x00, - 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00, 0x70, 0xD6, 0xFF, 0xFF, - 0x00, 0x00, 0x00, 0x0B, 0x04, 0x00, 0x00, 0x00, 0x06, 0xD7, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x88, 0xD7, - 0xFF, 0xFF, 0x08, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0xF6, 0xD6, 0xFF, 0xFF, 0x07, 0x00, 0x00, 0x00, - 0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0xE8, 0xD7, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0xC8, 0xD6, 0xFF, 0xFF, 0x00, 0x00, - 0x00, 0x0B, 0x04, 0x00, 0x00, 0x00, 0x5E, 0xD7, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0xE0, 0xD7, 0xFF, 0xFF, - 0x08, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x4E, 0xD7, 0xFF, 0xFF, 0x06, 0x00, 0x00, 0x00, 0x10, 0x00, - 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0xD8, - 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x20, 0xD7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B, - 0x04, 0x00, 0x00, 0x00, 0xB6, 0xD7, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x38, 0xD8, 0xFF, 0xFF, 0x08, 0x00, - 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0xA6, 0xD7, 0xFF, 0xFF, 0x05, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, - 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x98, 0xD8, 0xFF, 0xFF, - 0x03, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x78, 0xD7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B, 0x04, 0x00, - 0x00, 0x00, 0x0E, 0xD8, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x16, 0xD8, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, - 0xFA, 0xD7, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, - 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, - 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xEC, 0xD8, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x6C, 0xD8, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x23, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, - 0x12, 0x00, 0x04, 0x00, 0x08, 
0x00, 0x0C, 0x00, 0x0A, 0x00, 0x00, 0x00, 0xE0, 0x25, 0x00, 0x00, 0xD0, 0x25, - 0x00, 0x00, 0x2C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x26, 0x00, 0x48, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, - 0x10, 0x00, 0x14, 0x00, 0x18, 0x00, 0x1C, 0x00, 0x20, 0x00, 0x24, 0x00, 0x28, 0x00, 0x2C, 0x00, 0x30, 0x00, - 0x34, 0x00, 0x38, 0x00, 0x3C, 0x00, 0x40, 0x00, 0x44, 0x00, 0x26, 0x00, 0x00, 0x00, 0xC4, 0x23, 0x00, 0x00, - 0xF8, 0x21, 0x00, 0x00, 0x2C, 0x20, 0x00, 0x00, 0xF0, 0x1A, 0x00, 0x00, 0xB4, 0x15, 0x00, 0x00, 0x78, 0x10, - 0x00, 0x00, 0xF0, 0x0F, 0x00, 0x00, 0x68, 0x0F, 0x00, 0x00, 0xE0, 0x0E, 0x00, 0x00, 0x14, 0x0D, 0x00, 0x00, - 0xD8, 0x07, 0x00, 0x00, 0x50, 0x07, 0x00, 0x00, 0xC8, 0x06, 0x00, 0x00, 0x8C, 0x01, 0x00, 0x00, 0x14, 0x01, - 0x00, 0x00, 0x8C, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xEE, 0xD7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, - 0x64, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xFE, 0xD8, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x14, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x5A, 0xD8, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, - 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x72, 0xD8, - 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x64, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x82, 0xD9, 0xFF, 0xFF, - 0x04, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xDE, 0xD8, - 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, - 0x14, 0x00, 0x00, 0x00, 0xF6, 0xD8, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x54, 0x00, 0x00, 0x00, 0x04, 0x00, - 0x00, 0x00, 0x06, 0xDA, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x52, 0xD9, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x6A, 0xD9, 0xFF, 0xFF, 0x00, 0x00, - 0x00, 0x03, 0x14, 0x05, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x7A, 0xDA, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, - 0x40, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x86, 0xDE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0xA2, 0xDE, - 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x64, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xB2, 0xDF, 0xFF, 0xFF, - 0x04, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0xDF, - 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, - 0x14, 0x00, 0x00, 0x00, 0x26, 0xDF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x64, 0x00, 0x00, 0x00, 0x04, 0x00, - 0x00, 0x00, 0x36, 0xE0, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x92, 0xDF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0xAA, 0xDF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, - 0x14, 0x05, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xBA, 0xE0, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x40, 0x01, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0xC6, 0xE4, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0xE2, 0xE4, 0xFF, 0xFF, - 0x00, 0x00, 0x00, 0x03, 0xA4, 0x01, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xF2, 0xE5, 0xFF, 0xFF, 0x04, 0x00, - 0x00, 0x00, 0x64, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x8E, 0xE6, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, - 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x05, 0x00, - 0x00, 0x00, 0xAA, 0xE6, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x64, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, - 0xBA, 0xE7, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x16, 0xE7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x01, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x2E, 0xE7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x64, 0x00, - 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x3E, 0xE8, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x9A, 0xE7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0xB2, 0xE7, 0xFF, 0xFF, - 0x00, 0x00, 0x00, 0x03, 0x64, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xC2, 0xE8, 0xFF, 0xFF, 0x04, 0x00, - 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x1E, 0xE8, 0xFF, 0xFF, - 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x14, 0x00, - 0x00, 0x00, 0x36, 0xE8, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x14, 0x05, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, - 0x46, 0xE9, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x40, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x52, 0xED, 0xFF, 0xFF, - 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, - 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x6E, 0xED, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x14, 0x05, 0x00, 0x00, - 0x04, 0x00, 0x00, 0x00, 0x7E, 0xEE, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x40, 0x01, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x8A, 0xF2, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, - 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0xA6, 
0xF2, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, - 0x14, 0x05, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xB6, 0xF3, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x40, 0x01, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0xC2, 0xF7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0xDE, 0xF7, 0xFF, 0xFF, - 0x00, 0x00, 0x00, 0x03, 0xA4, 0x01, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xEE, 0xF8, 0xFF, 0xFF, 0x04, 0x00, - 0x00, 0x00, 0x64, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x8A, 0xF9, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, - 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x05, 0x00, - 0x00, 0x00, 0xA6, 0xF9, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0xA4, 0x01, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, - 0xB6, 0xFA, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x64, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x52, 0xFB, - 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, - 0x14, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x6E, 0xFB, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0xA4, 0x01, - 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x7E, 0xFC, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x64, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x1A, 0xFD, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x10, 0x00, 0x0C, 0x00, - 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x05, 0x00, 0x06, 0x00, 0x07, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x01, 0x01, 0x04, 0x00, 0x00, 0x00, 0x2E, 0xFE, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, - 0x22, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x6C, 0x73, - 0x74, 0x6D, 0x00, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0xEC, 0x00, 0x00, 0x00, 0xD0, 0x00, 0x00, 0x00, - 0xB4, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x88, 0x00, 0x00, 0x00, 0x5C, 0x00, 0x00, 0x00, 0x30, 0x00, - 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x14, 0xFF, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, - 0xA6, 0xFD, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, - 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x3C, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00, - 0x04, 0x00, 0x00, 0x00, 0xCE, 0xFD, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x64, 0xFF, 0xFF, 0xFF, - 0x01, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xF6, 0xFD, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, - 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, - 0xB4, 0xFE, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x1A, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x50, 0x00, 0x00, 0x00, - 0xF0, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, - 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xE8, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x09, 0x04, 0x00, 0x00, 0x00, - 0x7E, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, - 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x76, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00, - 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, - 0x68, 0xFF, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0xCE, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, - 0x08, 0x00, 0x0E, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x0C, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x08, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, - 0x08, 0x00, 0x0E, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x01, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x14, 0x00, - 0x0E, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, - 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, - 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x6E, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x08, 0x00, - 0x0C, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x04, 0x00, 0x00, 0x00, - 0xF6, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x0A, 0x00, 0x04, 0x00, 0x06, 0x00, - 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x14, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, - 0x0C, 0x00, 0x10, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, - 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, - 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x08, 0x00, 0x07, 0x00, 0x0C, 0x00, - 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, - 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x00 - }; - - armnn::INetworkPtr deserializedNetwork = - DeserializeNetwork(std::string(lstmNoCifgWithPeepholeAndProjectionModel.begin(), - lstmNoCifgWithPeepholeAndProjectionModel.end())); - - 
BOOST_CHECK(deserializedNetwork); - - // generating the same model parameters which where used to serialize the model (Layer norm is not specified) - armnn::LstmDescriptor descriptor; - descriptor.m_ActivationFunc = 4; - descriptor.m_ClippingThresProj = 0.0f; - descriptor.m_ClippingThresCell = 0.0f; - descriptor.m_CifgEnabled = false; - descriptor.m_ProjectionEnabled = true; - descriptor.m_PeepholeEnabled = true; - - const uint32_t batchSize = 2u; - const uint32_t inputSize = 5u; - const uint32_t numUnits = 20u; - const uint32_t outputSize = 16u; - - armnn::TensorInfo tensorInfo20x5({numUnits, inputSize}, armnn::DataType::Float32); - std::vector inputToInputWeightsData(tensorInfo20x5.GetNumElements(), 0.0f); - armnn::ConstTensor inputToInputWeights(tensorInfo20x5, inputToInputWeightsData); - - std::vector inputToForgetWeightsData(tensorInfo20x5.GetNumElements(), 0.0f); - armnn::ConstTensor inputToForgetWeights(tensorInfo20x5, inputToForgetWeightsData); - - std::vector inputToCellWeightsData(tensorInfo20x5.GetNumElements(), 0.0f); - armnn::ConstTensor inputToCellWeights(tensorInfo20x5, inputToCellWeightsData); - - std::vector inputToOutputWeightsData(tensorInfo20x5.GetNumElements(), 0.0f); - armnn::ConstTensor inputToOutputWeights(tensorInfo20x5, inputToOutputWeightsData); - - armnn::TensorInfo tensorInfo20({numUnits}, armnn::DataType::Float32); - std::vector inputGateBiasData(tensorInfo20.GetNumElements(), 0.0f); - armnn::ConstTensor inputGateBias(tensorInfo20, inputGateBiasData); - - std::vector forgetGateBiasData(tensorInfo20.GetNumElements(), 0.0f); - armnn::ConstTensor forgetGateBias(tensorInfo20, forgetGateBiasData); - - std::vector cellBiasData(tensorInfo20.GetNumElements(), 0.0f); - armnn::ConstTensor cellBias(tensorInfo20, cellBiasData); - - std::vector outputGateBiasData(tensorInfo20.GetNumElements(), 0.0f); - armnn::ConstTensor outputGateBias(tensorInfo20, outputGateBiasData); - - armnn::TensorInfo tensorInfo20x16({numUnits, outputSize}, armnn::DataType::Float32); - std::vector recurrentToInputWeightsData(tensorInfo20x16.GetNumElements(), 0.0f); - armnn::ConstTensor recurrentToInputWeights(tensorInfo20x16, recurrentToInputWeightsData); - - std::vector recurrentToForgetWeightsData(tensorInfo20x16.GetNumElements(), 0.0f); - armnn::ConstTensor recurrentToForgetWeights(tensorInfo20x16, recurrentToForgetWeightsData); - - std::vector recurrentToCellWeightsData(tensorInfo20x16.GetNumElements(), 0.0f); - armnn::ConstTensor recurrentToCellWeights(tensorInfo20x16, recurrentToCellWeightsData); - - std::vector recurrentToOutputWeightsData(tensorInfo20x16.GetNumElements(), 0.0f); - armnn::ConstTensor recurrentToOutputWeights(tensorInfo20x16, recurrentToOutputWeightsData); - - std::vector cellToInputWeightsData(tensorInfo20.GetNumElements(), 0.0f); - armnn::ConstTensor cellToInputWeights(tensorInfo20, cellToInputWeightsData); - - std::vector cellToForgetWeightsData(tensorInfo20.GetNumElements(), 0.0f); - armnn::ConstTensor cellToForgetWeights(tensorInfo20, cellToForgetWeightsData); - - std::vector cellToOutputWeightsData(tensorInfo20.GetNumElements(), 0.0f); - armnn::ConstTensor cellToOutputWeights(tensorInfo20, cellToOutputWeightsData); - - armnn::TensorInfo tensorInfo16x20({outputSize, numUnits}, armnn::DataType::Float32); - std::vector projectionWeightsData(tensorInfo16x20.GetNumElements(), 0.0f); - armnn::ConstTensor projectionWeights(tensorInfo16x20, projectionWeightsData); - - armnn::TensorInfo tensorInfo16({outputSize}, armnn::DataType::Float32); - std::vector 
projectionBiasData(outputSize, 0.0f); - armnn::ConstTensor projectionBias(tensorInfo16, projectionBiasData); - - armnn::LstmInputParams params; - params.m_InputToForgetWeights = &inputToForgetWeights; - params.m_InputToCellWeights = &inputToCellWeights; - params.m_InputToOutputWeights = &inputToOutputWeights; - params.m_RecurrentToForgetWeights = &recurrentToForgetWeights; - params.m_RecurrentToCellWeights = &recurrentToCellWeights; - params.m_RecurrentToOutputWeights = &recurrentToOutputWeights; - params.m_ForgetGateBias = &forgetGateBias; - params.m_CellBias = &cellBias; - params.m_OutputGateBias = &outputGateBias; - - // additional params because: descriptor.m_CifgEnabled = false - params.m_InputToInputWeights = &inputToInputWeights; - params.m_RecurrentToInputWeights = &recurrentToInputWeights; - params.m_CellToInputWeights = &cellToInputWeights; - params.m_InputGateBias = &inputGateBias; - - // additional params because: descriptor.m_ProjectionEnabled = true - params.m_ProjectionWeights = &projectionWeights; - params.m_ProjectionBias = &projectionBias; - - // additional params because: descriptor.m_PeepholeEnabled = true - params.m_CellToForgetWeights = &cellToForgetWeights; - params.m_CellToOutputWeights = &cellToOutputWeights; - - const std::string layerName("lstm"); - armnn::TensorInfo inputTensorInfo({ batchSize, inputSize }, armnn::DataType::Float32); - armnn::TensorInfo cellStateTensorInfo({ batchSize, numUnits}, armnn::DataType::Float32); - armnn::TensorInfo outputStateTensorInfo({ batchSize, outputSize }, armnn::DataType::Float32); - armnn::TensorInfo lstmTensorInfoScratchBuff({ batchSize, numUnits * 4 }, armnn::DataType::Float32); - - VerifyLstmLayer checker( - layerName, - {inputTensorInfo, outputStateTensorInfo, cellStateTensorInfo}, - {lstmTensorInfoScratchBuff, outputStateTensorInfo, cellStateTensorInfo, outputStateTensorInfo}, - descriptor, - params); - deserializedNetwork->Accept(checker); -} -class VerifyQuantizedLstmLayer : public LayerVerifierBase -{ - -public: - VerifyQuantizedLstmLayer(const std::string& layerName, - const std::vector& inputInfos, - const std::vector& outputInfos, - const armnn::QuantizedLstmInputParams& inputParams) - : LayerVerifierBase(layerName, inputInfos, outputInfos), m_InputParams(inputParams) {} - - void VisitQuantizedLstmLayer(const armnn::IConnectableLayer* layer, - const armnn::QuantizedLstmInputParams& params, - const char* name) - { - VerifyNameAndConnections(layer, name); - VerifyInputParameters(params); - } - -protected: - void VerifyInputParameters(const armnn::QuantizedLstmInputParams& params) - { - VerifyConstTensors("m_InputToInputWeights", - m_InputParams.m_InputToInputWeights, params.m_InputToInputWeights); - VerifyConstTensors("m_InputToForgetWeights", - m_InputParams.m_InputToForgetWeights, params.m_InputToForgetWeights); - VerifyConstTensors("m_InputToCellWeights", - m_InputParams.m_InputToCellWeights, params.m_InputToCellWeights); - VerifyConstTensors("m_InputToOutputWeights", - m_InputParams.m_InputToOutputWeights, params.m_InputToOutputWeights); - VerifyConstTensors("m_RecurrentToInputWeights", - m_InputParams.m_RecurrentToInputWeights, params.m_RecurrentToInputWeights); - VerifyConstTensors("m_RecurrentToForgetWeights", - m_InputParams.m_RecurrentToForgetWeights, params.m_RecurrentToForgetWeights); - VerifyConstTensors("m_RecurrentToCellWeights", - m_InputParams.m_RecurrentToCellWeights, params.m_RecurrentToCellWeights); - VerifyConstTensors("m_RecurrentToOutputWeights", - m_InputParams.m_RecurrentToOutputWeights, 
params.m_RecurrentToOutputWeights); - VerifyConstTensors("m_InputGateBias", - m_InputParams.m_InputGateBias, params.m_InputGateBias); - VerifyConstTensors("m_ForgetGateBias", - m_InputParams.m_ForgetGateBias, params.m_ForgetGateBias); - VerifyConstTensors("m_CellBias", - m_InputParams.m_CellBias, params.m_CellBias); - VerifyConstTensors("m_OutputGateBias", - m_InputParams.m_OutputGateBias, params.m_OutputGateBias); - } - -private: - armnn::QuantizedLstmInputParams m_InputParams; -}; - -BOOST_AUTO_TEST_CASE(SerializeDeserializeQuantizedLstm) -{ - const uint32_t batchSize = 1; - const uint32_t inputSize = 2; - const uint32_t numUnits = 4; - const uint32_t outputSize = numUnits; - - // Scale/Offset for input/output, cellState In/Out, weights, bias - float inputOutputScale = 0.0078125f; - int32_t inputOutputOffset = 128; - - float cellStateScale = 0.00048828125f; - int32_t cellStateOffset = 0; - - float weightsScale = 0.00408021f; - int32_t weightsOffset = 100; - - float biasScale = 3.1876640625e-05f; - int32_t biasOffset = 0; - - // The shape of weight data is {outputSize, inputSize} = {4, 2} - armnn::TensorShape inputToInputWeightsShape = {4, 2}; - std::vector inputToInputWeightsData = {1, 2, 3, 4, 5, 6, 7, 8}; - armnn::TensorInfo inputToInputWeightsInfo(inputToInputWeightsShape, - armnn::DataType::QAsymmU8, - weightsScale, - weightsOffset); - armnn::ConstTensor inputToInputWeights(inputToInputWeightsInfo, inputToInputWeightsData); - - armnn::TensorShape inputToForgetWeightsShape = {4, 2}; - std::vector inputToForgetWeightsData = {1, 2, 3, 4, 5, 6, 7, 8}; - armnn::TensorInfo inputToForgetWeightsInfo(inputToForgetWeightsShape, - armnn::DataType::QAsymmU8, - weightsScale, - weightsOffset); - armnn::ConstTensor inputToForgetWeights(inputToForgetWeightsInfo, inputToForgetWeightsData); - - armnn::TensorShape inputToCellWeightsShape = {4, 2}; - std::vector inputToCellWeightsData = {1, 2, 3, 4, 5, 6, 7, 8}; - armnn::TensorInfo inputToCellWeightsInfo(inputToCellWeightsShape, - armnn::DataType::QAsymmU8, - weightsScale, - weightsOffset); - armnn::ConstTensor inputToCellWeights(inputToCellWeightsInfo, inputToCellWeightsData); - - armnn::TensorShape inputToOutputWeightsShape = {4, 2}; - std::vector inputToOutputWeightsData = {1, 2, 3, 4, 5, 6, 7, 8}; - armnn::TensorInfo inputToOutputWeightsInfo(inputToOutputWeightsShape, - armnn::DataType::QAsymmU8, - weightsScale, - weightsOffset); - armnn::ConstTensor inputToOutputWeights(inputToOutputWeightsInfo, inputToOutputWeightsData); - - // The shape of recurrent weight data is {outputSize, outputSize} = {4, 4} - armnn::TensorShape recurrentToInputWeightsShape = {4, 4}; - std::vector recurrentToInputWeightsData = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}; - armnn::TensorInfo recurrentToInputWeightsInfo(recurrentToInputWeightsShape, - armnn::DataType::QAsymmU8, - weightsScale, - weightsOffset); - armnn::ConstTensor recurrentToInputWeights(recurrentToInputWeightsInfo, recurrentToInputWeightsData); - - armnn::TensorShape recurrentToForgetWeightsShape = {4, 4}; - std::vector recurrentToForgetWeightsData = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}; - armnn::TensorInfo recurrentToForgetWeightsInfo(recurrentToForgetWeightsShape, - armnn::DataType::QAsymmU8, - weightsScale, - weightsOffset); - armnn::ConstTensor recurrentToForgetWeights(recurrentToForgetWeightsInfo, recurrentToForgetWeightsData); - - armnn::TensorShape recurrentToCellWeightsShape = {4, 4}; - std::vector recurrentToCellWeightsData = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 
13, 14, 15, 16}; - armnn::TensorInfo recurrentToCellWeightsInfo(recurrentToCellWeightsShape, - armnn::DataType::QAsymmU8, - weightsScale, - weightsOffset); - armnn::ConstTensor recurrentToCellWeights(recurrentToCellWeightsInfo, recurrentToCellWeightsData); - - armnn::TensorShape recurrentToOutputWeightsShape = {4, 4}; - std::vector recurrentToOutputWeightsData = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}; - armnn::TensorInfo recurrentToOutputWeightsInfo(recurrentToOutputWeightsShape, - armnn::DataType::QAsymmU8, - weightsScale, - weightsOffset); - armnn::ConstTensor recurrentToOutputWeights(recurrentToOutputWeightsInfo, recurrentToOutputWeightsData); - - // The shape of bias data is {outputSize} = {4} - armnn::TensorShape inputGateBiasShape = {4}; - std::vector inputGateBiasData = {1, 2, 3, 4}; - armnn::TensorInfo inputGateBiasInfo(inputGateBiasShape, - armnn::DataType::Signed32, - biasScale, - biasOffset); - armnn::ConstTensor inputGateBias(inputGateBiasInfo, inputGateBiasData); - - armnn::TensorShape forgetGateBiasShape = {4}; - std::vector forgetGateBiasData = {1, 2, 3, 4}; - armnn::TensorInfo forgetGateBiasInfo(forgetGateBiasShape, - armnn::DataType::Signed32, - biasScale, - biasOffset); - armnn::ConstTensor forgetGateBias(forgetGateBiasInfo, forgetGateBiasData); - - armnn::TensorShape cellBiasShape = {4}; - std::vector cellBiasData = {1, 2, 3, 4}; - armnn::TensorInfo cellBiasInfo(cellBiasShape, - armnn::DataType::Signed32, - biasScale, - biasOffset); - armnn::ConstTensor cellBias(cellBiasInfo, cellBiasData); - - armnn::TensorShape outputGateBiasShape = {4}; - std::vector outputGateBiasData = {1, 2, 3, 4}; - armnn::TensorInfo outputGateBiasInfo(outputGateBiasShape, - armnn::DataType::Signed32, - biasScale, - biasOffset); - armnn::ConstTensor outputGateBias(outputGateBiasInfo, outputGateBiasData); - - armnn::QuantizedLstmInputParams params; - params.m_InputToInputWeights = &inputToInputWeights; - params.m_InputToForgetWeights = &inputToForgetWeights; - params.m_InputToCellWeights = &inputToCellWeights; - params.m_InputToOutputWeights = &inputToOutputWeights; - params.m_RecurrentToInputWeights = &recurrentToInputWeights; - params.m_RecurrentToForgetWeights = &recurrentToForgetWeights; - params.m_RecurrentToCellWeights = &recurrentToCellWeights; - params.m_RecurrentToOutputWeights = &recurrentToOutputWeights; - params.m_InputGateBias = &inputGateBias; - params.m_ForgetGateBias = &forgetGateBias; - params.m_CellBias = &cellBias; - params.m_OutputGateBias = &outputGateBias; - - armnn::INetworkPtr network = armnn::INetwork::Create(); - armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0); - armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(1); - armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(2); - const std::string layerName("QuantizedLstm"); - armnn::IConnectableLayer* const quantizedLstmLayer = network->AddQuantizedLstmLayer(params, layerName.c_str()); - armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(0); - armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(1); - - // Connect up - armnn::TensorInfo inputTensorInfo({ batchSize, inputSize }, - armnn::DataType::QAsymmU8, - inputOutputScale, - inputOutputOffset); - armnn::TensorInfo cellStateTensorInfo({ batchSize, numUnits }, - armnn::DataType::QSymmS16, - cellStateScale, - cellStateOffset); - armnn::TensorInfo outputStateTensorInfo({ batchSize, outputSize }, - armnn::DataType::QAsymmU8, - inputOutputScale, - inputOutputOffset); 
- - inputLayer->GetOutputSlot(0).Connect(quantizedLstmLayer->GetInputSlot(0)); - inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo); - - cellStateIn->GetOutputSlot(0).Connect(quantizedLstmLayer->GetInputSlot(1)); - cellStateIn->GetOutputSlot(0).SetTensorInfo(cellStateTensorInfo); - - outputStateIn->GetOutputSlot(0).Connect(quantizedLstmLayer->GetInputSlot(2)); - outputStateIn->GetOutputSlot(0).SetTensorInfo(outputStateTensorInfo); - - quantizedLstmLayer->GetOutputSlot(0).Connect(cellStateOut->GetInputSlot(0)); - quantizedLstmLayer->GetOutputSlot(0).SetTensorInfo(cellStateTensorInfo); - - quantizedLstmLayer->GetOutputSlot(1).Connect(outputLayer->GetInputSlot(0)); - quantizedLstmLayer->GetOutputSlot(1).SetTensorInfo(outputStateTensorInfo); - - armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); - BOOST_CHECK(deserializedNetwork); - - VerifyQuantizedLstmLayer checker(layerName, - {inputTensorInfo, cellStateTensorInfo, outputStateTensorInfo}, - {cellStateTensorInfo, outputStateTensorInfo}, - params); - - deserializedNetwork->Accept(checker); -} - -class VerifyQLstmLayer : public LayerVerifierBaseWithDescriptor -{ -public: - VerifyQLstmLayer(const std::string& layerName, - const std::vector& inputInfos, - const std::vector& outputInfos, - const armnn::QLstmDescriptor& descriptor, - const armnn::LstmInputParams& inputParams) - : LayerVerifierBaseWithDescriptor(layerName, inputInfos, outputInfos, descriptor) - , m_InputParams(inputParams) {} - - void VisitQLstmLayer(const armnn::IConnectableLayer* layer, - const armnn::QLstmDescriptor& descriptor, - const armnn::LstmInputParams& params, - const char* name) - { - VerifyNameAndConnections(layer, name); - VerifyDescriptor(descriptor); - VerifyInputParameters(params); - } - -protected: - void VerifyInputParameters(const armnn::LstmInputParams& params) - { - VerifyConstTensors( - "m_InputToInputWeights", m_InputParams.m_InputToInputWeights, params.m_InputToInputWeights); - VerifyConstTensors( - "m_InputToForgetWeights", m_InputParams.m_InputToForgetWeights, params.m_InputToForgetWeights); - VerifyConstTensors( - "m_InputToCellWeights", m_InputParams.m_InputToCellWeights, params.m_InputToCellWeights); - VerifyConstTensors( - "m_InputToOutputWeights", m_InputParams.m_InputToOutputWeights, params.m_InputToOutputWeights); - VerifyConstTensors( - "m_RecurrentToInputWeights", m_InputParams.m_RecurrentToInputWeights, params.m_RecurrentToInputWeights); - VerifyConstTensors( - "m_RecurrentToForgetWeights", m_InputParams.m_RecurrentToForgetWeights, params.m_RecurrentToForgetWeights); - VerifyConstTensors( - "m_RecurrentToCellWeights", m_InputParams.m_RecurrentToCellWeights, params.m_RecurrentToCellWeights); - VerifyConstTensors( - "m_RecurrentToOutputWeights", m_InputParams.m_RecurrentToOutputWeights, params.m_RecurrentToOutputWeights); - VerifyConstTensors( - "m_CellToInputWeights", m_InputParams.m_CellToInputWeights, params.m_CellToInputWeights); - VerifyConstTensors( - "m_CellToForgetWeights", m_InputParams.m_CellToForgetWeights, params.m_CellToForgetWeights); - VerifyConstTensors( - "m_CellToOutputWeights", m_InputParams.m_CellToOutputWeights, params.m_CellToOutputWeights); - VerifyConstTensors( - "m_InputGateBias", m_InputParams.m_InputGateBias, params.m_InputGateBias); - VerifyConstTensors( - "m_ForgetGateBias", m_InputParams.m_ForgetGateBias, params.m_ForgetGateBias); - VerifyConstTensors( - "m_CellBias", m_InputParams.m_CellBias, params.m_CellBias); - VerifyConstTensors( - "m_OutputGateBias", 
m_InputParams.m_OutputGateBias, params.m_OutputGateBias); - VerifyConstTensors( - "m_ProjectionWeights", m_InputParams.m_ProjectionWeights, params.m_ProjectionWeights); - VerifyConstTensors( - "m_ProjectionBias", m_InputParams.m_ProjectionBias, params.m_ProjectionBias); - VerifyConstTensors( - "m_InputLayerNormWeights", m_InputParams.m_InputLayerNormWeights, params.m_InputLayerNormWeights); - VerifyConstTensors( - "m_ForgetLayerNormWeights", m_InputParams.m_ForgetLayerNormWeights, params.m_ForgetLayerNormWeights); - VerifyConstTensors( - "m_CellLayerNormWeights", m_InputParams.m_CellLayerNormWeights, params.m_CellLayerNormWeights); - VerifyConstTensors( - "m_OutputLayerNormWeights", m_InputParams.m_OutputLayerNormWeights, params.m_OutputLayerNormWeights); - } - -private: - armnn::LstmInputParams m_InputParams; -}; - -BOOST_AUTO_TEST_CASE(SerializeDeserializeQLstmBasic) -{ - armnn::QLstmDescriptor descriptor; - - descriptor.m_CifgEnabled = true; - descriptor.m_ProjectionEnabled = false; - descriptor.m_PeepholeEnabled = false; - descriptor.m_LayerNormEnabled = false; - - descriptor.m_CellClip = 0.0f; - descriptor.m_ProjectionClip = 0.0f; - - descriptor.m_InputIntermediateScale = 0.00001f; - descriptor.m_ForgetIntermediateScale = 0.00001f; - descriptor.m_CellIntermediateScale = 0.00001f; - descriptor.m_OutputIntermediateScale = 0.00001f; - - descriptor.m_HiddenStateScale = 0.07f; - descriptor.m_HiddenStateZeroPoint = 0; - - const unsigned int numBatches = 2; - const unsigned int inputSize = 5; - const unsigned int outputSize = 4; - const unsigned int numUnits = 4; - - // Scale/Offset quantization info - float inputScale = 0.0078f; - int32_t inputOffset = 0; - - float outputScale = 0.0078f; - int32_t outputOffset = 0; - - float cellStateScale = 3.5002e-05f; - int32_t cellStateOffset = 0; - - float weightsScale = 0.007f; - int32_t weightsOffset = 0; - - float biasScale = 3.5002e-05f / 1024; - int32_t biasOffset = 0; - - // Weights and bias tensor and quantization info - armnn::TensorInfo inputWeightsInfo({numUnits, inputSize}, - armnn::DataType::QSymmS8, - weightsScale, - weightsOffset); - - armnn::TensorInfo recurrentWeightsInfo({numUnits, outputSize}, - armnn::DataType::QSymmS8, - weightsScale, - weightsOffset); - - armnn::TensorInfo biasInfo({numUnits}, armnn::DataType::Signed32, biasScale, biasOffset); - - std::vector inputToForgetWeightsData = GenerateRandomData(inputWeightsInfo.GetNumElements()); - std::vector inputToCellWeightsData = GenerateRandomData(inputWeightsInfo.GetNumElements()); - std::vector inputToOutputWeightsData = GenerateRandomData(inputWeightsInfo.GetNumElements()); - - armnn::ConstTensor inputToForgetWeights(inputWeightsInfo, inputToForgetWeightsData); - armnn::ConstTensor inputToCellWeights(inputWeightsInfo, inputToCellWeightsData); - armnn::ConstTensor inputToOutputWeights(inputWeightsInfo, inputToOutputWeightsData); - - std::vector recurrentToForgetWeightsData = - GenerateRandomData(recurrentWeightsInfo.GetNumElements()); - std::vector recurrentToCellWeightsData = - GenerateRandomData(recurrentWeightsInfo.GetNumElements()); - std::vector recurrentToOutputWeightsData = - GenerateRandomData(recurrentWeightsInfo.GetNumElements()); - - armnn::ConstTensor recurrentToForgetWeights(recurrentWeightsInfo, recurrentToForgetWeightsData); - armnn::ConstTensor recurrentToCellWeights(recurrentWeightsInfo, recurrentToCellWeightsData); - armnn::ConstTensor recurrentToOutputWeights(recurrentWeightsInfo, recurrentToOutputWeightsData); - - std::vector forgetGateBiasData(numUnits, 1); - 
std::vector cellBiasData(numUnits, 0); - std::vector outputGateBiasData(numUnits, 0); - - armnn::ConstTensor forgetGateBias(biasInfo, forgetGateBiasData); - armnn::ConstTensor cellBias(biasInfo, cellBiasData); - armnn::ConstTensor outputGateBias(biasInfo, outputGateBiasData); - - // Set up params - armnn::LstmInputParams params; - params.m_InputToForgetWeights = &inputToForgetWeights; - params.m_InputToCellWeights = &inputToCellWeights; - params.m_InputToOutputWeights = &inputToOutputWeights; - - params.m_RecurrentToForgetWeights = &recurrentToForgetWeights; - params.m_RecurrentToCellWeights = &recurrentToCellWeights; - params.m_RecurrentToOutputWeights = &recurrentToOutputWeights; - - params.m_ForgetGateBias = &forgetGateBias; - params.m_CellBias = &cellBias; - params.m_OutputGateBias = &outputGateBias; - - // Create network - armnn::INetworkPtr network = armnn::INetwork::Create(); - const std::string layerName("qLstm"); - - armnn::IConnectableLayer* const input = network->AddInputLayer(0); - armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(1); - armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(2); - - armnn::IConnectableLayer* const qLstmLayer = network->AddQLstmLayer(descriptor, params, layerName.c_str()); - - armnn::IConnectableLayer* const outputStateOut = network->AddOutputLayer(0); - armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(1); - armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(2); - - // Input/Output tensor info - armnn::TensorInfo inputInfo({numBatches , inputSize}, - armnn::DataType::QAsymmS8, - inputScale, - inputOffset); - - armnn::TensorInfo cellStateInfo({numBatches , numUnits}, - armnn::DataType::QSymmS16, - cellStateScale, - cellStateOffset); - - armnn::TensorInfo outputStateInfo({numBatches , outputSize}, - armnn::DataType::QAsymmS8, - outputScale, - outputOffset); - - // Connect input/output slots - input->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(0)); - input->GetOutputSlot(0).SetTensorInfo(inputInfo); - - outputStateIn->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(1)); - outputStateIn->GetOutputSlot(0).SetTensorInfo(cellStateInfo); - - cellStateIn->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(2)); - cellStateIn->GetOutputSlot(0).SetTensorInfo(outputStateInfo); - - qLstmLayer->GetOutputSlot(0).Connect(outputStateOut->GetInputSlot(0)); - qLstmLayer->GetOutputSlot(0).SetTensorInfo(outputStateInfo); - - qLstmLayer->GetOutputSlot(1).Connect(cellStateOut->GetInputSlot(0)); - qLstmLayer->GetOutputSlot(1).SetTensorInfo(cellStateInfo); - - qLstmLayer->GetOutputSlot(2).Connect(outputLayer->GetInputSlot(0)); - qLstmLayer->GetOutputSlot(2).SetTensorInfo(outputStateInfo); - - armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); - BOOST_CHECK(deserializedNetwork); - - VerifyQLstmLayer checker(layerName, - {inputInfo, cellStateInfo, outputStateInfo}, - {outputStateInfo, cellStateInfo, outputStateInfo}, - descriptor, - params); - - deserializedNetwork->Accept(checker); -} - -BOOST_AUTO_TEST_CASE(SerializeDeserializeQLstmCifgLayerNorm) -{ - armnn::QLstmDescriptor descriptor; - - // CIFG params are used when CIFG is disabled - descriptor.m_CifgEnabled = true; - descriptor.m_ProjectionEnabled = false; - descriptor.m_PeepholeEnabled = false; - descriptor.m_LayerNormEnabled = true; - - descriptor.m_CellClip = 0.0f; - descriptor.m_ProjectionClip = 0.0f; - - descriptor.m_InputIntermediateScale = 0.00001f; - descriptor.m_ForgetIntermediateScale = 
0.00001f; - descriptor.m_CellIntermediateScale = 0.00001f; - descriptor.m_OutputIntermediateScale = 0.00001f; - - descriptor.m_HiddenStateScale = 0.07f; - descriptor.m_HiddenStateZeroPoint = 0; - - const unsigned int numBatches = 2; - const unsigned int inputSize = 5; - const unsigned int outputSize = 4; - const unsigned int numUnits = 4; - - // Scale/Offset quantization info - float inputScale = 0.0078f; - int32_t inputOffset = 0; - - float outputScale = 0.0078f; - int32_t outputOffset = 0; - - float cellStateScale = 3.5002e-05f; - int32_t cellStateOffset = 0; - - float weightsScale = 0.007f; - int32_t weightsOffset = 0; - - float layerNormScale = 3.5002e-05f; - int32_t layerNormOffset = 0; - - float biasScale = layerNormScale / 1024; - int32_t biasOffset = 0; - - // Weights and bias tensor and quantization info - armnn::TensorInfo inputWeightsInfo({numUnits, inputSize}, - armnn::DataType::QSymmS8, - weightsScale, - weightsOffset); - - armnn::TensorInfo recurrentWeightsInfo({numUnits, outputSize}, - armnn::DataType::QSymmS8, - weightsScale, - weightsOffset); - - armnn::TensorInfo biasInfo({numUnits}, - armnn::DataType::Signed32, - biasScale, - biasOffset); - - armnn::TensorInfo layerNormWeightsInfo({numUnits}, - armnn::DataType::QSymmS16, - layerNormScale, - layerNormOffset); - - // Mandatory params - std::vector inputToForgetWeightsData = GenerateRandomData(inputWeightsInfo.GetNumElements()); - std::vector inputToCellWeightsData = GenerateRandomData(inputWeightsInfo.GetNumElements()); - std::vector inputToOutputWeightsData = GenerateRandomData(inputWeightsInfo.GetNumElements()); - - armnn::ConstTensor inputToForgetWeights(inputWeightsInfo, inputToForgetWeightsData); - armnn::ConstTensor inputToCellWeights(inputWeightsInfo, inputToCellWeightsData); - armnn::ConstTensor inputToOutputWeights(inputWeightsInfo, inputToOutputWeightsData); - - std::vector recurrentToForgetWeightsData = - GenerateRandomData(recurrentWeightsInfo.GetNumElements()); - std::vector recurrentToCellWeightsData = - GenerateRandomData(recurrentWeightsInfo.GetNumElements()); - std::vector recurrentToOutputWeightsData = - GenerateRandomData(recurrentWeightsInfo.GetNumElements()); - - armnn::ConstTensor recurrentToForgetWeights(recurrentWeightsInfo, recurrentToForgetWeightsData); - armnn::ConstTensor recurrentToCellWeights(recurrentWeightsInfo, recurrentToCellWeightsData); - armnn::ConstTensor recurrentToOutputWeights(recurrentWeightsInfo, recurrentToOutputWeightsData); - - std::vector forgetGateBiasData(numUnits, 1); - std::vector cellBiasData(numUnits, 0); - std::vector outputGateBiasData(numUnits, 0); - - armnn::ConstTensor forgetGateBias(biasInfo, forgetGateBiasData); - armnn::ConstTensor cellBias(biasInfo, cellBiasData); - armnn::ConstTensor outputGateBias(biasInfo, outputGateBiasData); - - // Layer Norm - std::vector forgetLayerNormWeightsData = - GenerateRandomData(layerNormWeightsInfo.GetNumElements()); - std::vector cellLayerNormWeightsData = - GenerateRandomData(layerNormWeightsInfo.GetNumElements()); - std::vector outputLayerNormWeightsData = - GenerateRandomData(layerNormWeightsInfo.GetNumElements()); - - armnn::ConstTensor forgetLayerNormWeights(layerNormWeightsInfo, forgetLayerNormWeightsData); - armnn::ConstTensor cellLayerNormWeights(layerNormWeightsInfo, cellLayerNormWeightsData); - armnn::ConstTensor outputLayerNormWeights(layerNormWeightsInfo, outputLayerNormWeightsData); - - // Set up params - armnn::LstmInputParams params; - - // Mandatory params - params.m_InputToForgetWeights = &inputToForgetWeights; 
- params.m_InputToCellWeights = &inputToCellWeights; - params.m_InputToOutputWeights = &inputToOutputWeights; - - params.m_RecurrentToForgetWeights = &recurrentToForgetWeights; - params.m_RecurrentToCellWeights = &recurrentToCellWeights; - params.m_RecurrentToOutputWeights = &recurrentToOutputWeights; - - params.m_ForgetGateBias = &forgetGateBias; - params.m_CellBias = &cellBias; - params.m_OutputGateBias = &outputGateBias; - - // Layer Norm - params.m_ForgetLayerNormWeights = &forgetLayerNormWeights; - params.m_CellLayerNormWeights = &cellLayerNormWeights; - params.m_OutputLayerNormWeights = &outputLayerNormWeights; - - // Create network - armnn::INetworkPtr network = armnn::INetwork::Create(); - const std::string layerName("qLstm"); - - armnn::IConnectableLayer* const input = network->AddInputLayer(0); - armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(1); - armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(2); - - armnn::IConnectableLayer* const qLstmLayer = network->AddQLstmLayer(descriptor, params, layerName.c_str()); - - armnn::IConnectableLayer* const outputStateOut = network->AddOutputLayer(0); - armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(1); - armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(2); - - // Input/Output tensor info - armnn::TensorInfo inputInfo({numBatches , inputSize}, - armnn::DataType::QAsymmS8, - inputScale, - inputOffset); - - armnn::TensorInfo cellStateInfo({numBatches , numUnits}, - armnn::DataType::QSymmS16, - cellStateScale, - cellStateOffset); - - armnn::TensorInfo outputStateInfo({numBatches , outputSize}, - armnn::DataType::QAsymmS8, - outputScale, - outputOffset); - - // Connect input/output slots - input->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(0)); - input->GetOutputSlot(0).SetTensorInfo(inputInfo); - - outputStateIn->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(1)); - outputStateIn->GetOutputSlot(0).SetTensorInfo(cellStateInfo); - - cellStateIn->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(2)); - cellStateIn->GetOutputSlot(0).SetTensorInfo(outputStateInfo); - - qLstmLayer->GetOutputSlot(0).Connect(outputStateOut->GetInputSlot(0)); - qLstmLayer->GetOutputSlot(0).SetTensorInfo(outputStateInfo); - - qLstmLayer->GetOutputSlot(1).Connect(cellStateOut->GetInputSlot(0)); - qLstmLayer->GetOutputSlot(1).SetTensorInfo(cellStateInfo); - - qLstmLayer->GetOutputSlot(2).Connect(outputLayer->GetInputSlot(0)); - qLstmLayer->GetOutputSlot(2).SetTensorInfo(outputStateInfo); - - armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); - BOOST_CHECK(deserializedNetwork); - - VerifyQLstmLayer checker(layerName, - {inputInfo, cellStateInfo, outputStateInfo}, - {outputStateInfo, cellStateInfo, outputStateInfo}, - descriptor, - params); - - deserializedNetwork->Accept(checker); -} - -BOOST_AUTO_TEST_CASE(SerializeDeserializeQLstmAdvanced) -{ - armnn::QLstmDescriptor descriptor; - - descriptor.m_CifgEnabled = false; - descriptor.m_ProjectionEnabled = true; - descriptor.m_PeepholeEnabled = true; - descriptor.m_LayerNormEnabled = true; - - descriptor.m_CellClip = 0.1f; - descriptor.m_ProjectionClip = 0.1f; - - descriptor.m_InputIntermediateScale = 0.00001f; - descriptor.m_ForgetIntermediateScale = 0.00001f; - descriptor.m_CellIntermediateScale = 0.00001f; - descriptor.m_OutputIntermediateScale = 0.00001f; - - descriptor.m_HiddenStateScale = 0.07f; - descriptor.m_HiddenStateZeroPoint = 0; - - const unsigned int numBatches = 2; - const 
unsigned int inputSize = 5; - const unsigned int outputSize = 4; - const unsigned int numUnits = 4; - - // Scale/Offset quantization info - float inputScale = 0.0078f; - int32_t inputOffset = 0; - - float outputScale = 0.0078f; - int32_t outputOffset = 0; - - float cellStateScale = 3.5002e-05f; - int32_t cellStateOffset = 0; - - float weightsScale = 0.007f; - int32_t weightsOffset = 0; - - float layerNormScale = 3.5002e-05f; - int32_t layerNormOffset = 0; - - float biasScale = layerNormScale / 1024; - int32_t biasOffset = 0; - - // Weights and bias tensor and quantization info - armnn::TensorInfo inputWeightsInfo({numUnits, inputSize}, - armnn::DataType::QSymmS8, - weightsScale, - weightsOffset); - - armnn::TensorInfo recurrentWeightsInfo({numUnits, outputSize}, - armnn::DataType::QSymmS8, - weightsScale, - weightsOffset); - - armnn::TensorInfo biasInfo({numUnits}, - armnn::DataType::Signed32, - biasScale, - biasOffset); - - armnn::TensorInfo peepholeWeightsInfo({numUnits}, - armnn::DataType::QSymmS16, - weightsScale, - weightsOffset); - - armnn::TensorInfo layerNormWeightsInfo({numUnits}, - armnn::DataType::QSymmS16, - layerNormScale, - layerNormOffset); - - armnn::TensorInfo projectionWeightsInfo({outputSize, numUnits}, - armnn::DataType::QSymmS8, - weightsScale, - weightsOffset); - - // Mandatory params - std::vector inputToForgetWeightsData = GenerateRandomData(inputWeightsInfo.GetNumElements()); - std::vector inputToCellWeightsData = GenerateRandomData(inputWeightsInfo.GetNumElements()); - std::vector inputToOutputWeightsData = GenerateRandomData(inputWeightsInfo.GetNumElements()); - - armnn::ConstTensor inputToForgetWeights(inputWeightsInfo, inputToForgetWeightsData); - armnn::ConstTensor inputToCellWeights(inputWeightsInfo, inputToCellWeightsData); - armnn::ConstTensor inputToOutputWeights(inputWeightsInfo, inputToOutputWeightsData); - - std::vector recurrentToForgetWeightsData = - GenerateRandomData(recurrentWeightsInfo.GetNumElements()); - std::vector recurrentToCellWeightsData = - GenerateRandomData(recurrentWeightsInfo.GetNumElements()); - std::vector recurrentToOutputWeightsData = - GenerateRandomData(recurrentWeightsInfo.GetNumElements()); - - armnn::ConstTensor recurrentToForgetWeights(recurrentWeightsInfo, recurrentToForgetWeightsData); - armnn::ConstTensor recurrentToCellWeights(recurrentWeightsInfo, recurrentToCellWeightsData); - armnn::ConstTensor recurrentToOutputWeights(recurrentWeightsInfo, recurrentToOutputWeightsData); - - std::vector forgetGateBiasData(numUnits, 1); - std::vector cellBiasData(numUnits, 0); - std::vector outputGateBiasData(numUnits, 0); - - armnn::ConstTensor forgetGateBias(biasInfo, forgetGateBiasData); - armnn::ConstTensor cellBias(biasInfo, cellBiasData); - armnn::ConstTensor outputGateBias(biasInfo, outputGateBiasData); - - // CIFG - std::vector inputToInputWeightsData = GenerateRandomData(inputWeightsInfo.GetNumElements()); - std::vector recurrentToInputWeightsData = - GenerateRandomData(recurrentWeightsInfo.GetNumElements()); - std::vector inputGateBiasData(numUnits, 1); - - armnn::ConstTensor inputToInputWeights(inputWeightsInfo, inputToInputWeightsData); - armnn::ConstTensor recurrentToInputWeights(recurrentWeightsInfo, recurrentToInputWeightsData); - armnn::ConstTensor inputGateBias(biasInfo, inputGateBiasData); - - // Peephole - std::vector cellToInputWeightsData = GenerateRandomData(peepholeWeightsInfo.GetNumElements()); - std::vector cellToForgetWeightsData = GenerateRandomData(peepholeWeightsInfo.GetNumElements()); - std::vector 
-    std::vector<int16_t> cellToOutputWeightsData = GenerateRandomData<int16_t>(peepholeWeightsInfo.GetNumElements());
-
-    armnn::ConstTensor cellToInputWeights(peepholeWeightsInfo, cellToInputWeightsData);
-    armnn::ConstTensor cellToForgetWeights(peepholeWeightsInfo, cellToForgetWeightsData);
-    armnn::ConstTensor cellToOutputWeights(peepholeWeightsInfo, cellToOutputWeightsData);
-
-    // Projection
-    std::vector<int8_t> projectionWeightsData = GenerateRandomData<int8_t>(projectionWeightsInfo.GetNumElements());
-    std::vector<int32_t> projectionBiasData(outputSize, 1);
-
-    armnn::ConstTensor projectionWeights(projectionWeightsInfo, projectionWeightsData);
-    armnn::ConstTensor projectionBias(biasInfo, projectionBiasData);
-
-    // Layer Norm
-    std::vector<int16_t> inputLayerNormWeightsData =
-        GenerateRandomData<int16_t>(layerNormWeightsInfo.GetNumElements());
-    std::vector<int16_t> forgetLayerNormWeightsData =
-        GenerateRandomData<int16_t>(layerNormWeightsInfo.GetNumElements());
-    std::vector<int16_t> cellLayerNormWeightsData =
-        GenerateRandomData<int16_t>(layerNormWeightsInfo.GetNumElements());
-    std::vector<int16_t> outputLayerNormWeightsData =
-        GenerateRandomData<int16_t>(layerNormWeightsInfo.GetNumElements());
-
-    armnn::ConstTensor inputLayerNormWeights(layerNormWeightsInfo, inputLayerNormWeightsData);
-    armnn::ConstTensor forgetLayerNormWeights(layerNormWeightsInfo, forgetLayerNormWeightsData);
-    armnn::ConstTensor cellLayerNormWeights(layerNormWeightsInfo, cellLayerNormWeightsData);
-    armnn::ConstTensor outputLayerNormWeights(layerNormWeightsInfo, outputLayerNormWeightsData);
-
-    // Set up params
-    armnn::LstmInputParams params;
-
-    // Mandatory params
-    params.m_InputToForgetWeights = &inputToForgetWeights;
-    params.m_InputToCellWeights = &inputToCellWeights;
-    params.m_InputToOutputWeights = &inputToOutputWeights;
-
-    params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
-    params.m_RecurrentToCellWeights = &recurrentToCellWeights;
-    params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
-
-    params.m_ForgetGateBias = &forgetGateBias;
-    params.m_CellBias = &cellBias;
-    params.m_OutputGateBias = &outputGateBias;
-
-    // CIFG
-    params.m_InputToInputWeights = &inputToInputWeights;
-    params.m_RecurrentToInputWeights = &recurrentToInputWeights;
-    params.m_InputGateBias = &inputGateBias;
-
-    // Peephole
-    params.m_CellToInputWeights = &cellToInputWeights;
-    params.m_CellToForgetWeights = &cellToForgetWeights;
-    params.m_CellToOutputWeights = &cellToOutputWeights;
-
-    // Projection
-    params.m_ProjectionWeights = &projectionWeights;
-    params.m_ProjectionBias = &projectionBias;
-
-    // Layer Norm
-    params.m_InputLayerNormWeights = &inputLayerNormWeights;
-    params.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
-    params.m_CellLayerNormWeights = &cellLayerNormWeights;
-    params.m_OutputLayerNormWeights = &outputLayerNormWeights;
-
-    // Create network
-    armnn::INetworkPtr network = armnn::INetwork::Create();
-    const std::string layerName("qLstm");
-
-    armnn::IConnectableLayer* const input = network->AddInputLayer(0);
-    armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(1);
-    armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(2);
-
-    armnn::IConnectableLayer* const qLstmLayer = network->AddQLstmLayer(descriptor, params, layerName.c_str());
-
-    armnn::IConnectableLayer* const outputStateOut = network->AddOutputLayer(0);
-    armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(1);
-    armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(2);
-
-    // Input/Output tensor info
-    armnn::TensorInfo inputInfo({numBatches , inputSize},
-                                armnn::DataType::QAsymmS8,
-                                inputScale,
-                                inputOffset);
-
-    armnn::TensorInfo cellStateInfo({numBatches , numUnits},
-                                    armnn::DataType::QSymmS16,
-                                    cellStateScale,
-                                    cellStateOffset);
-
-    armnn::TensorInfo outputStateInfo({numBatches , outputSize},
-                                      armnn::DataType::QAsymmS8,
-                                      outputScale,
-                                      outputOffset);
-
-    // Connect input/output slots
-    input->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(0));
-    input->GetOutputSlot(0).SetTensorInfo(inputInfo);
-
-    outputStateIn->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(1));
-    outputStateIn->GetOutputSlot(0).SetTensorInfo(cellStateInfo);
-
-    cellStateIn->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(2));
-    cellStateIn->GetOutputSlot(0).SetTensorInfo(outputStateInfo);
-
-    qLstmLayer->GetOutputSlot(0).Connect(outputStateOut->GetInputSlot(0));
-    qLstmLayer->GetOutputSlot(0).SetTensorInfo(outputStateInfo);
-
-    qLstmLayer->GetOutputSlot(1).Connect(cellStateOut->GetInputSlot(0));
-    qLstmLayer->GetOutputSlot(1).SetTensorInfo(cellStateInfo);
-
-    qLstmLayer->GetOutputSlot(2).Connect(outputLayer->GetInputSlot(0));
-    qLstmLayer->GetOutputSlot(2).SetTensorInfo(outputStateInfo);
-
-    armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
-    BOOST_CHECK(deserializedNetwork);
-
-    VerifyQLstmLayer checker(layerName,
-                             {inputInfo, cellStateInfo, outputStateInfo},
-                             {outputStateInfo, cellStateInfo, outputStateInfo},
-                             descriptor,
-                             params);
-
-    deserializedNetwork->Accept(checker);
+    deserializedNetwork->ExecuteStrategy(verifier);
 }
 
 BOOST_AUTO_TEST_SUITE_END()
--
cgit v1.2.1
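
Note for readers porting their own visitor-based test checkers: the call-site change this patch applies throughout the serializer tests is sketched below. This is an illustrative fragment only, reusing the variable names from the removed QLstm test above; it assumes a VerifyQLstmLayer that implements armnn::IStrategy (its actual definition is supplied by the new serializer test utilities added in this patch) and is not part of the patch itself.

    // Sketch: the checker is constructed exactly as before; only the
    // dispatch call on the deserialized network changes.
    VerifyQLstmLayer checker(layerName,
                             {inputInfo, cellStateInfo, outputStateInfo},
                             {outputStateInfo, cellStateInfo, outputStateInfo},
                             descriptor,
                             params);

    // Old ILayerVisitor dispatch (removed by this patch):
    //     deserializedNetwork->Accept(checker);

    // New unified IStrategy dispatch (introduced by this patch):
    deserializedNetwork->ExecuteStrategy(checker);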