about summary refs log tree commit diff
path: root/src/armnnSerializer
diff options
context:
space:
mode:
author	Finn Williams <Finn.Williams@arm.com>	2021-02-09 15:56:23 +0000
committer	Finn Williams <Finn.Williams@arm.com>	2021-02-12 13:10:20 +0000
commitb454c5c65efb238c130b042ace390b2bc7f0bf75 (patch)
treed6681d0abf416b3cc280bc3bb70e7d55dfd40a0d /src/armnnSerializer
parent8eae955f665f371b0a2c7c1a06e8ba442afa2298 (diff)
downloadarmnn-b454c5c65efb238c130b042ace390b2bc7f0bf75.tar.gz
IVGCVSW-4893 Refactor ILayerVisitor using unified interface strategy.
Signed-off-by: Jan Eilers <jan.eilers@arm.com>
Signed-off-by: Finn Williams <Finn.Williams@arm.com>
Signed-off-by: Francis Murtagh <francis.murtagh@arm.com>
Change-Id: Id7bc8255a8e3f9e5aac65d510bec8a559bf37246
Diffstat (limited to 'src/armnnSerializer')
-rw-r--r--src/armnnSerializer/Serializer.cpp817
-rw-r--r--src/armnnSerializer/Serializer.hpp481
-rw-r--r--src/armnnSerializer/test/ActivationSerializationTests.cpp19
-rw-r--r--src/armnnSerializer/test/ComparisonSerializationTests.cpp123
-rw-r--r--src/armnnSerializer/test/LstmSerializationTests.cpp2199
-rw-r--r--src/armnnSerializer/test/SerializerTestUtils.cpp163
-rw-r--r--src/armnnSerializer/test/SerializerTestUtils.hpp167
-rw-r--r--src/armnnSerializer/test/SerializerTests.cpp3461
8 files changed, 3861 insertions, 3569 deletions
diff --git a/src/armnnSerializer/Serializer.cpp b/src/armnnSerializer/Serializer.cpp
index 28afac7b62..bcdaa087fb 100644
--- a/src/armnnSerializer/Serializer.cpp
+++ b/src/armnnSerializer/Serializer.cpp
@@ -3,6 +3,7 @@
// SPDX-License-Identifier: MIT
//
#include "Serializer.hpp"
+#include "SerializerUtils.hpp"
#include <armnn/Descriptors.hpp>
#include <armnn/LstmParams.hpp>
@@ -10,9 +11,9 @@
#include <armnn/utility/IgnoreUnused.hpp>
#include <armnn/utility/NumericCast.hpp>
+#include <fmt/format.h>
#include <iostream>
-#include "SerializerUtils.hpp"
using namespace armnn;
namespace fb = flatbuffers;
@@ -95,7 +96,7 @@ serializer::ArgMinMaxFunction GetFlatBufferArgMinMaxFunction(armnn::ArgMinMaxFun
}
}
-uint32_t SerializerVisitor::GetSerializedId(armnn::LayerGuid guid)
+uint32_t SerializerStrategy::GetSerializedId(armnn::LayerGuid guid)
{
if (m_guidMap.empty())
{
@@ -112,7 +113,7 @@ uint32_t SerializerVisitor::GetSerializedId(armnn::LayerGuid guid)
}
// Build FlatBuffer for Input Layer
-void SerializerVisitor::VisitInputLayer(const armnn::IConnectableLayer* layer, LayerBindingId id, const char* name)
+void SerializerStrategy::SerializeInputLayer(const armnn::IConnectableLayer* layer, LayerBindingId id, const char* name)
{
IgnoreUnused(name);
@@ -134,7 +135,8 @@ void SerializerVisitor::VisitInputLayer(const armnn::IConnectableLayer* layer, L
}
// Build FlatBuffer for Output Layer
-void SerializerVisitor::VisitOutputLayer(const armnn::IConnectableLayer* layer, LayerBindingId id, const char* name)
+void SerializerStrategy::SerializeOutputLayer(const armnn::IConnectableLayer* layer,
+ LayerBindingId id, const char* name)
{
IgnoreUnused(name);
@@ -154,7 +156,7 @@ void SerializerVisitor::VisitOutputLayer(const armnn::IConnectableLayer* layer,
CreateAnyLayer(flatBufferOutputLayer.o, serializer::Layer::Layer_OutputLayer);
}
-void SerializerVisitor::VisitAbsLayer(const armnn::IConnectableLayer* layer, const char* name)
+void SerializerStrategy::SerializeAbsLayer(const armnn::IConnectableLayer* layer, const char* name)
{
IgnoreUnused(name);
auto flatBufferBaseLayer = CreateLayerBase(layer, serializer::LayerType::LayerType_Abs);
@@ -164,9 +166,9 @@ void SerializerVisitor::VisitAbsLayer(const armnn::IConnectableLayer* layer, con
}
// Build FlatBuffer for Activation Layer
-void SerializerVisitor::VisitActivationLayer(const armnn::IConnectableLayer* layer,
- const armnn::ActivationDescriptor& descriptor,
- const char* name)
+void SerializerStrategy::SerializeActivationLayer(const armnn::IConnectableLayer* layer,
+ const armnn::ActivationDescriptor& descriptor,
+ const char* name)
{
IgnoreUnused(name);
@@ -189,7 +191,7 @@ void SerializerVisitor::VisitActivationLayer(const armnn::IConnectableLayer* lay
}
// Build FlatBuffer for Addition Layer
-void SerializerVisitor::VisitAdditionLayer(const armnn::IConnectableLayer* layer, const char* name)
+void SerializerStrategy::SerializeAdditionLayer(const armnn::IConnectableLayer* layer, const char* name)
{
IgnoreUnused(name);
@@ -204,9 +206,9 @@ void SerializerVisitor::VisitAdditionLayer(const armnn::IConnectableLayer* layer
}
// Build FlatBuffer for ArgMinMax Layer
-void SerializerVisitor::VisitArgMinMaxLayer(const armnn::IConnectableLayer *layer,
- const armnn::ArgMinMaxDescriptor& descriptor,
- const char *name)
+void SerializerStrategy::SerializeArgMinMaxLayer(const armnn::IConnectableLayer *layer,
+ const armnn::ArgMinMaxDescriptor& descriptor,
+ const char *name)
{
IgnoreUnused(name);
@@ -227,9 +229,9 @@ void SerializerVisitor::VisitArgMinMaxLayer(const armnn::IConnectableLayer *laye
}
// Build FlatBuffer for BatchToSpaceNd Layer
-void SerializerVisitor::VisitBatchToSpaceNdLayer(const armnn::IConnectableLayer* layer,
- const armnn::BatchToSpaceNdDescriptor& descriptor,
- const char* name)
+void SerializerStrategy::SerializeBatchToSpaceNdLayer(const armnn::IConnectableLayer* layer,
+ const armnn::BatchToSpaceNdDescriptor& descriptor,
+ const char* name)
{
IgnoreUnused(name);
@@ -257,16 +259,19 @@ void SerializerVisitor::VisitBatchToSpaceNdLayer(const armnn::IConnectableLayer*
CreateAnyLayer(flatBufferLayer.o, serializer::Layer::Layer_BatchToSpaceNdLayer);
}
-void SerializerVisitor::VisitBatchNormalizationLayer(const armnn::IConnectableLayer* layer,
- const armnn::BatchNormalizationDescriptor& batchNormDescriptor,
- const armnn::ConstTensor& mean,
- const armnn::ConstTensor& variance,
- const armnn::ConstTensor& beta,
- const armnn::ConstTensor& gamma,
- const char* name)
+void SerializerStrategy::SerializeBatchNormalizationLayer(
+ const armnn::IConnectableLayer* layer,
+ const armnn::BatchNormalizationDescriptor& batchNormDescriptor,
+ const std::vector<armnn::ConstTensor>& constants,
+ const char* name)
{
IgnoreUnused(name);
+ const armnn::ConstTensor& mean = constants[0];
+ const armnn::ConstTensor& variance = constants[1];
+ const armnn::ConstTensor& beta = constants[2];
+ const armnn::ConstTensor& gamma = constants[3];
+
auto fbBatchNormalizationBaseLayer = CreateLayerBase(layer, serializer::LayerType::LayerType_BatchNormalization);
auto fbBatchNormalizationDescriptor = serializer::CreateBatchNormalizationDescriptor(
m_flatBufferBuilder,
@@ -288,7 +293,7 @@ void SerializerVisitor::VisitBatchNormalizationLayer(const armnn::IConnectableLa
CreateAnyLayer(fbBatchNormalizationLayer.o, serializer::Layer::Layer_BatchNormalizationLayer);
}
-void SerializerVisitor::VisitComparisonLayer(const armnn::IConnectableLayer* layer,
+void SerializerStrategy::SerializeComparisonLayer(const armnn::IConnectableLayer* layer,
const armnn::ComparisonDescriptor& descriptor,
const char* name)
{
@@ -304,12 +309,14 @@ void SerializerVisitor::VisitComparisonLayer(const armnn::IConnectableLayer* lay
}
// Build FlatBuffer for Constant Layer
-void SerializerVisitor::VisitConstantLayer(const armnn::IConnectableLayer* layer,
- const armnn::ConstTensor& input,
- const char* name)
+void SerializerStrategy::SerializeConstantLayer(const armnn::IConnectableLayer* layer,
+ const std::vector<armnn::ConstTensor>& constants,
+ const char* name)
{
IgnoreUnused(name);
+ armnn::ConstTensor input = constants[0];
+
// Create FlatBuffer BaseLayer
auto flatBufferConstantBaseLayer = CreateLayerBase(layer, serializer::LayerType::LayerType_Constant);
@@ -325,14 +332,15 @@ void SerializerVisitor::VisitConstantLayer(const armnn::IConnectableLayer* layer
}
// Build FlatBuffer for Convolution2dLayer
-void SerializerVisitor::VisitConvolution2dLayer(const armnn::IConnectableLayer* layer,
- const armnn::Convolution2dDescriptor& descriptor,
- const armnn::ConstTensor& weights,
- const armnn::Optional<armnn::ConstTensor>& biases,
- const char* name)
+void SerializerStrategy::SerializeConvolution2dLayer(const armnn::IConnectableLayer* layer,
+ const armnn::Convolution2dDescriptor& descriptor,
+ const std::vector<armnn::ConstTensor>& constants,
+ const char* name)
{
IgnoreUnused(name);
+ const armnn::ConstTensor weights = constants[0];
+
// Create FlatBuffer BaseLayer
auto flatBufferBaseLayer = CreateLayerBase(layer, serializer::LayerType::LayerType_Convolution2d);
@@ -350,9 +358,10 @@ void SerializerVisitor::VisitConvolution2dLayer(const armnn::IConnectableLayer*
auto flatBufferWeightsConstTensorInfo = CreateConstTensorInfo(weights);
flatbuffers::Offset<serializer::ConstTensor> flatBufferBiasesConstTensorInfo;
- if (biases.has_value())
+ if (constants.size() > 1)
{
- flatBufferBiasesConstTensorInfo = CreateConstTensorInfo(biases.value());
+ const armnn::ConstTensor biases = constants[1];
+ flatBufferBiasesConstTensorInfo = CreateConstTensorInfo(biases);
}
// Create the FlatBuffer Convolution2dLayer
@@ -366,7 +375,7 @@ void SerializerVisitor::VisitConvolution2dLayer(const armnn::IConnectableLayer*
CreateAnyLayer(flatBufferLayer.o, serializer::Layer::Layer_Convolution2dLayer);
}
-void SerializerVisitor::VisitDepthToSpaceLayer(const armnn::IConnectableLayer* layer,
+void SerializerStrategy::SerializeDepthToSpaceLayer(const armnn::IConnectableLayer* layer,
const armnn::DepthToSpaceDescriptor& descriptor,
const char* name)
{
@@ -382,14 +391,15 @@ void SerializerVisitor::VisitDepthToSpaceLayer(const armnn::IConnectableLayer* l
CreateAnyLayer(fbLayer.o, serializer::Layer::Layer_DepthToSpaceLayer);
}
-void SerializerVisitor::VisitDepthwiseConvolution2dLayer(const armnn::IConnectableLayer* layer,
- const armnn::DepthwiseConvolution2dDescriptor& descriptor,
- const armnn::ConstTensor& weights,
- const armnn::Optional<armnn::ConstTensor>& biases,
- const char* name)
+void SerializerStrategy::SerializeDepthwiseConvolution2dLayer(const armnn::IConnectableLayer* layer,
+ const armnn::DepthwiseConvolution2dDescriptor& descriptor,
+ const std::vector<armnn::ConstTensor>& constants,
+ const char* name)
{
IgnoreUnused(name);
+ const armnn::ConstTensor& weights = constants[0];
+
auto fbBaseLayer = CreateLayerBase(layer, serializer::LayerType::LayerType_DepthwiseConvolution2d);
auto fbDescriptor = CreateDepthwiseConvolution2dDescriptor(m_flatBufferBuilder,
descriptor.m_PadLeft,
@@ -405,9 +415,11 @@ void SerializerVisitor::VisitDepthwiseConvolution2dLayer(const armnn::IConnectab
flatbuffers::Offset<serializer::ConstTensor> fbWeightsConstTensorInfo = CreateConstTensorInfo(weights);
flatbuffers::Offset<serializer::ConstTensor> fbBiasesConstTensorInfo;
- if (biases.has_value())
+
+ if (constants.size() > 1)
{
- fbBiasesConstTensorInfo = CreateConstTensorInfo(biases.value());
+ const armnn::ConstTensor& biases = constants[1];
+ fbBiasesConstTensorInfo = CreateConstTensorInfo(biases);
}
auto flatBufferLayer = CreateDepthwiseConvolution2dLayer(m_flatBufferBuilder,
@@ -419,7 +431,7 @@ void SerializerVisitor::VisitDepthwiseConvolution2dLayer(const armnn::IConnectab
CreateAnyLayer(flatBufferLayer.o, serializer::Layer::Layer_DepthwiseConvolution2dLayer);
}
-void SerializerVisitor::VisitDequantizeLayer(const armnn::IConnectableLayer* layer,
+void SerializerStrategy::SerializeDequantizeLayer(const armnn::IConnectableLayer* layer,
const char* name)
{
IgnoreUnused(name);
@@ -430,13 +442,15 @@ void SerializerVisitor::VisitDequantizeLayer(const armnn::IConnectableLayer* lay
CreateAnyLayer(fbDequantizeLayer.o, serializer::Layer::Layer_DequantizeLayer);
}
-void SerializerVisitor::VisitDetectionPostProcessLayer(const armnn::IConnectableLayer* layer,
- const armnn::DetectionPostProcessDescriptor& descriptor,
- const armnn::ConstTensor& anchors,
- const char* name)
+void SerializerStrategy::SerializeDetectionPostProcessLayer(const armnn::IConnectableLayer* layer,
+ const armnn::DetectionPostProcessDescriptor& descriptor,
+ const std::vector<armnn::ConstTensor>& constants,
+ const char* name)
{
IgnoreUnused(name);
+ const armnn::ConstTensor& anchors = constants[0];
+
auto fbBaseLayer = CreateLayerBase(layer, serializer::LayerType::LayerType_DetectionPostProcess);
auto fbDescriptor = CreateDetectionPostProcessDescriptor(m_flatBufferBuilder,
descriptor.m_MaxDetections,
@@ -461,7 +475,7 @@ void SerializerVisitor::VisitDetectionPostProcessLayer(const armnn::IConnectable
CreateAnyLayer(flatBufferLayer.o, serializer::Layer::Layer_DetectionPostProcessLayer);
}
-void SerializerVisitor::VisitDivisionLayer(const armnn::IConnectableLayer* layer, const char* name)
+void SerializerStrategy::SerializeDivisionLayer(const armnn::IConnectableLayer* layer, const char* name)
{
IgnoreUnused(name);
@@ -471,7 +485,7 @@ void SerializerVisitor::VisitDivisionLayer(const armnn::IConnectableLayer* layer
CreateAnyLayer(fbDivisionLayer.o, serializer::Layer::Layer_DivisionLayer);
}
-void SerializerVisitor::VisitElementwiseUnaryLayer(const armnn::IConnectableLayer* layer,
+void SerializerStrategy::SerializeElementwiseUnaryLayer(const armnn::IConnectableLayer* layer,
const armnn::ElementwiseUnaryDescriptor& descriptor,
const char* name)
{
@@ -486,7 +500,7 @@ void SerializerVisitor::VisitElementwiseUnaryLayer(const armnn::IConnectableLaye
CreateAnyLayer(fbLayer.o, serializer::Layer::Layer_ElementwiseUnaryLayer);
}
-void SerializerVisitor::VisitEqualLayer(const armnn::IConnectableLayer* layer, const char* name)
+void SerializerStrategy::SerializeEqualLayer(const armnn::IConnectableLayer* layer, const char* name)
{
IgnoreUnused(name);
@@ -496,7 +510,7 @@ void SerializerVisitor::VisitEqualLayer(const armnn::IConnectableLayer* layer, c
CreateAnyLayer(fbEqualLayer.o, serializer::Layer::Layer_EqualLayer);
}
-void SerializerVisitor::VisitFillLayer(const armnn::IConnectableLayer* layer,
+void SerializerStrategy::SerializeFillLayer(const armnn::IConnectableLayer* layer,
const armnn::FillDescriptor& fillDescriptor,
const char* name)
{
@@ -511,7 +525,7 @@ void SerializerVisitor::VisitFillLayer(const armnn::IConnectableLayer* layer,
CreateAnyLayer(fbFillLayer.o, serializer::Layer::Layer_FillLayer);
}
-void SerializerVisitor::VisitFloorLayer(const armnn::IConnectableLayer *layer, const char *name)
+void SerializerStrategy::SerializeFloorLayer(const armnn::IConnectableLayer *layer, const char *name)
{
IgnoreUnused(name);
@@ -521,14 +535,7 @@ void SerializerVisitor::VisitFloorLayer(const armnn::IConnectableLayer *layer, c
CreateAnyLayer(flatBufferFloorLayer.o, serializer::Layer::Layer_FloorLayer);
}
-void SerializerVisitor::VisitGatherLayer(const armnn::IConnectableLayer* layer,
- const char* name)
-{
- armnn::GatherDescriptor gatherDescriptor{};
- VisitGatherLayer(layer, gatherDescriptor, name);
-}
-
-void SerializerVisitor::VisitGatherLayer(const armnn::IConnectableLayer* layer,
+void SerializerStrategy::SerializeGatherLayer(const armnn::IConnectableLayer* layer,
const armnn::GatherDescriptor& gatherDescriptor,
const char* name)
{
@@ -542,7 +549,8 @@ void SerializerVisitor::VisitGatherLayer(const armnn::IConnectableLayer* layer,
CreateAnyLayer(flatBufferLayer.o, serializer::Layer::Layer_GatherLayer);
}
-void SerializerVisitor::VisitGreaterLayer(const armnn::IConnectableLayer* layer, const char* name)
+
+void SerializerStrategy::SerializeGreaterLayer(const armnn::IConnectableLayer* layer, const char* name)
{
IgnoreUnused(name);
@@ -552,7 +560,7 @@ void SerializerVisitor::VisitGreaterLayer(const armnn::IConnectableLayer* layer,
CreateAnyLayer(fbGreaterLayer.o, serializer::Layer::Layer_GreaterLayer);
}
-void SerializerVisitor::VisitInstanceNormalizationLayer(
+void SerializerStrategy::SerializeInstanceNormalizationLayer(
const armnn::IConnectableLayer* layer,
const armnn::InstanceNormalizationDescriptor& instanceNormalizationDescriptor,
const char* name)
@@ -572,7 +580,7 @@ void SerializerVisitor::VisitInstanceNormalizationLayer(
CreateAnyLayer(fbLayer.o, serializer::Layer::Layer_InstanceNormalizationLayer);
}
-void SerializerVisitor::VisitL2NormalizationLayer(const armnn::IConnectableLayer* layer,
+void SerializerStrategy::SerializeL2NormalizationLayer(const armnn::IConnectableLayer* layer,
const armnn::L2NormalizationDescriptor& l2NormalizationDescriptor,
const char* name)
{
@@ -593,7 +601,7 @@ void SerializerVisitor::VisitL2NormalizationLayer(const armnn::IConnectableLayer
CreateAnyLayer(fbLayer.o, serializer::Layer::Layer_L2NormalizationLayer);
}
-void SerializerVisitor::VisitLogicalBinaryLayer(const armnn::IConnectableLayer* layer,
+void SerializerStrategy::SerializeLogicalBinaryLayer(const armnn::IConnectableLayer* layer,
const armnn::LogicalBinaryDescriptor& descriptor,
const char* name)
{
@@ -608,7 +616,7 @@ void SerializerVisitor::VisitLogicalBinaryLayer(const armnn::IConnectableLayer*
CreateAnyLayer(fbLayer.o, serializer::Layer::Layer_LogicalBinaryLayer);
}
-void SerializerVisitor::VisitLogSoftmaxLayer(const armnn::IConnectableLayer* layer,
+void SerializerStrategy::SerializeLogSoftmaxLayer(const armnn::IConnectableLayer* layer,
const armnn::LogSoftmaxDescriptor& logSoftmaxDescriptor,
const char* name)
{
@@ -632,10 +640,10 @@ void SerializerVisitor::VisitLogSoftmaxLayer(const armnn::IConnectableLayer* lay
CreateAnyLayer(flatBufferLogSoftmaxLayer.o, serializer::Layer::Layer_LogSoftmaxLayer);
}
-void SerializerVisitor::VisitLstmLayer(const armnn::IConnectableLayer* layer,
- const armnn::LstmDescriptor& descriptor,
- const armnn::LstmInputParams& params,
- const char* name)
+void SerializerStrategy::SerializeLstmLayer(const armnn::IConnectableLayer* layer,
+ const armnn::LstmDescriptor& descriptor,
+ const std::vector<armnn::ConstTensor>& constants,
+ const char* name)
{
IgnoreUnused(name);
@@ -651,16 +659,21 @@ void SerializerVisitor::VisitLstmLayer(const armnn::IConnectableLayer* layer,
descriptor.m_ProjectionEnabled,
descriptor.m_LayerNormEnabled);
- // Get mandatory input parameters
- auto inputToForgetWeights = CreateConstTensorInfo(*params.m_InputToForgetWeights);
- auto inputToCellWeights = CreateConstTensorInfo(*params.m_InputToCellWeights);
- auto inputToOutputWeights = CreateConstTensorInfo(*params.m_InputToOutputWeights);
- auto recurrentToForgetWeights = CreateConstTensorInfo(*params.m_RecurrentToForgetWeights);
- auto recurrentToCellWeights = CreateConstTensorInfo(*params.m_RecurrentToCellWeights);
- auto recurrentToOutputWeights = CreateConstTensorInfo(*params.m_RecurrentToOutputWeights);
- auto forgetGateBias = CreateConstTensorInfo(*params.m_ForgetGateBias);
- auto cellBias = CreateConstTensorInfo(*params.m_CellBias);
- auto outputGateBias = CreateConstTensorInfo(*params.m_OutputGateBias);
+ // Index for constants vector
+ std::size_t i = 0;
+
+ // Get mandatory/basic input parameters
+ auto inputToForgetWeights = CreateConstTensorInfo(constants[i++]); //InputToForgetWeights
+ auto inputToCellWeights = CreateConstTensorInfo(constants[i++]); //InputToCellWeights
+ auto inputToOutputWeights = CreateConstTensorInfo(constants[i++]); //InputToOutputWeights
+ auto recurrentToForgetWeights = CreateConstTensorInfo(constants[i++]); //RecurrentToForgetWeights
+ auto recurrentToCellWeights = CreateConstTensorInfo(constants[i++]); //RecurrentToCellWeights
+ auto recurrentToOutputWeights = CreateConstTensorInfo(constants[i++]); //RecurrentToOutputWeights
+ auto forgetGateBias = CreateConstTensorInfo(constants[i++]); //ForgetGateBias
+ auto cellBias = CreateConstTensorInfo(constants[i++]); //CellBias
+ auto outputGateBias = CreateConstTensorInfo(constants[i++]); //OutputGateBias
+
+
//Define optional parameters, these will be set depending on configuration in Lstm descriptor
flatbuffers::Offset<serializer::ConstTensor> inputToInputWeights;
@@ -678,33 +691,36 @@ void SerializerVisitor::VisitLstmLayer(const armnn::IConnectableLayer* layer,
if (!descriptor.m_CifgEnabled)
{
- inputToInputWeights = CreateConstTensorInfo(*params.m_InputToInputWeights);
- recurrentToInputWeights = CreateConstTensorInfo(*params.m_RecurrentToInputWeights);
- cellToInputWeights = CreateConstTensorInfo(*params.m_CellToInputWeights);
- inputGateBias = CreateConstTensorInfo(*params.m_InputGateBias);
+ inputToInputWeights = CreateConstTensorInfo(constants[i++]); //InputToInputWeights
+ recurrentToInputWeights = CreateConstTensorInfo(constants[i++]); //RecurrentToInputWeights
+ inputGateBias = CreateConstTensorInfo(constants[i++]); //InputGateBias
}
- if (descriptor.m_ProjectionEnabled)
+ if (descriptor.m_PeepholeEnabled)
{
- projectionWeights = CreateConstTensorInfo(*params.m_ProjectionWeights);
- projectionBias = CreateConstTensorInfo(*params.m_ProjectionBias);
+ if (!descriptor.m_CifgEnabled)
+ {
+ cellToInputWeights = CreateConstTensorInfo(constants[i++]); //CellToInputWeights
+ }
+ cellToForgetWeights = CreateConstTensorInfo(constants[i++]); //CellToForgetWeights
+ cellToOutputWeights = CreateConstTensorInfo(constants[i++]); //CellToOutputWeights
}
- if (descriptor.m_PeepholeEnabled)
+ if (descriptor.m_ProjectionEnabled)
{
- cellToForgetWeights = CreateConstTensorInfo(*params.m_CellToForgetWeights);
- cellToOutputWeights = CreateConstTensorInfo(*params.m_CellToOutputWeights);
+ projectionWeights = CreateConstTensorInfo(constants[i++]); //ProjectionWeights
+ projectionBias = CreateConstTensorInfo(constants[i++]); //ProjectionBias
}
if (descriptor.m_LayerNormEnabled)
{
if (!descriptor.m_CifgEnabled)
{
- inputLayerNormWeights = CreateConstTensorInfo((*params.m_InputLayerNormWeights));
+ inputLayerNormWeights = CreateConstTensorInfo(constants[i++]); //InputLayerNormWeights
}
- forgetLayerNormWeights = CreateConstTensorInfo(*params.m_ForgetLayerNormWeights);
- cellLayerNormWeights = CreateConstTensorInfo(*params.m_CellLayerNormWeights);
- outputLayerNormWeights = CreateConstTensorInfo(*params.m_OutputLayerNormWeights);
+ forgetLayerNormWeights = CreateConstTensorInfo(constants[i++]); //ForgetLayerNormWeights
+ cellLayerNormWeights = CreateConstTensorInfo(constants[i++]); //CellLayerNormWeights
+ outputLayerNormWeights = CreateConstTensorInfo(constants[i++]); //OutputLayerNormWeights
}
auto fbLstmParams = serializer::CreateLstmInputParams(
@@ -740,7 +756,7 @@ void SerializerVisitor::VisitLstmLayer(const armnn::IConnectableLayer* layer,
CreateAnyLayer(fbLstmLayer.o, serializer::Layer::Layer_LstmLayer);
}
-void SerializerVisitor::VisitMaximumLayer(const armnn::IConnectableLayer* layer, const char* name)
+void SerializerStrategy::SerializeMaximumLayer(const armnn::IConnectableLayer* layer, const char* name)
{
IgnoreUnused(name);
@@ -750,7 +766,7 @@ void SerializerVisitor::VisitMaximumLayer(const armnn::IConnectableLayer* layer,
CreateAnyLayer(fbMaximumLayer.o, serializer::Layer::Layer_MaximumLayer);
}
-void SerializerVisitor::VisitMeanLayer(const armnn::IConnectableLayer* layer,
+void SerializerStrategy::SerializeMeanLayer(const armnn::IConnectableLayer* layer,
const armnn::MeanDescriptor& descriptor,
const char* name)
{
@@ -768,7 +784,7 @@ void SerializerVisitor::VisitMeanLayer(const armnn::IConnectableLayer* layer,
CreateAnyLayer(fbMeanLayer.o, serializer::Layer::Layer_MeanLayer);
}
-void SerializerVisitor::VisitMinimumLayer(const armnn::IConnectableLayer* layer, const char* name)
+void SerializerStrategy::SerializeMinimumLayer(const armnn::IConnectableLayer* layer, const char* name)
{
IgnoreUnused(name);
@@ -778,7 +794,7 @@ void SerializerVisitor::VisitMinimumLayer(const armnn::IConnectableLayer* layer,
CreateAnyLayer(fbMinimumLayer.o, serializer::Layer::Layer_MinimumLayer);
}
-void SerializerVisitor::VisitMergeLayer(const armnn::IConnectableLayer* layer, const char* name)
+void SerializerStrategy::SerializeMergeLayer(const armnn::IConnectableLayer* layer, const char* name)
{
IgnoreUnused(name);
@@ -788,14 +804,14 @@ void SerializerVisitor::VisitMergeLayer(const armnn::IConnectableLayer* layer, c
CreateAnyLayer(fbMergeLayer.o, serializer::Layer::Layer_MergeLayer);
}
-void SerializerVisitor::VisitMergerLayer(const armnn::IConnectableLayer* layer,
+void SerializerStrategy::SerializeMergerLayer(const armnn::IConnectableLayer* layer,
const armnn::MergerDescriptor& mergerDescriptor,
const char* name)
{
- VisitConcatLayer(layer, mergerDescriptor, name);
+ SerializeConcatLayer(layer, mergerDescriptor, name);
}
-void SerializerVisitor::VisitConcatLayer(const armnn::IConnectableLayer* layer,
+void SerializerStrategy::SerializeConcatLayer(const armnn::IConnectableLayer* layer,
const armnn::ConcatDescriptor& concatDescriptor,
const char* name)
{
@@ -830,7 +846,7 @@ void SerializerVisitor::VisitConcatLayer(const armnn::IConnectableLayer* layer,
CreateAnyLayer(flatBufferLayer.o, serializer::Layer::Layer_ConcatLayer);
}
-void SerializerVisitor::VisitMultiplicationLayer(const armnn::IConnectableLayer* layer, const char* name)
+void SerializerStrategy::SerializeMultiplicationLayer(const armnn::IConnectableLayer* layer, const char* name)
{
IgnoreUnused(name);
@@ -841,7 +857,7 @@ void SerializerVisitor::VisitMultiplicationLayer(const armnn::IConnectableLayer*
CreateAnyLayer(fbMultiplicationLayer.o, serializer::Layer::Layer_MultiplicationLayer);
}
-void SerializerVisitor::VisitPadLayer(const armnn::IConnectableLayer* layer,
+void SerializerStrategy::SerializePadLayer(const armnn::IConnectableLayer* layer,
const armnn::PadDescriptor& padDescriptor,
const char* name)
{
@@ -867,7 +883,7 @@ void SerializerVisitor::VisitPadLayer(const armnn::IConnectableLayer* layer,
CreateAnyLayer(flatBufferPadLayer.o, serializer::Layer::Layer_PadLayer);
}
-void SerializerVisitor::VisitPermuteLayer(const armnn::IConnectableLayer* layer,
+void SerializerStrategy::SerializePermuteLayer(const armnn::IConnectableLayer* layer,
const armnn::PermuteDescriptor& permuteDescriptor,
const char* name)
{
@@ -895,7 +911,7 @@ void SerializerVisitor::VisitPermuteLayer(const armnn::IConnectableLayer* layer,
}
// Build FlatBuffer for Rank Layer
-void SerializerVisitor::VisitRankLayer(const armnn::IConnectableLayer* layer,
+void SerializerStrategy::SerializeRankLayer(const armnn::IConnectableLayer* layer,
const char* name)
{
IgnoreUnused(name);
@@ -905,9 +921,9 @@ void SerializerVisitor::VisitRankLayer(const armnn::IConnectableLayer* layer,
CreateAnyLayer(flatBufferRankLayer.o, serializer::Layer::Layer_RankLayer);
}
-void SerializerVisitor::VisitReduceLayer(const armnn::IConnectableLayer* layer,
- const armnn::ReduceDescriptor& reduceDescriptor,
- const char*)
+void SerializerStrategy::SerializeReduceLayer(const armnn::IConnectableLayer* layer,
+ const armnn::ReduceDescriptor& reduceDescriptor,
+ const char*)
{
auto fbReduceBaseLayer = CreateLayerBase(layer, serializer::LayerType::LayerType_Reduce);
auto fbDescriptor = CreateReduceDescriptor(m_flatBufferBuilder,
@@ -922,7 +938,7 @@ void SerializerVisitor::VisitReduceLayer(const armnn::IConnectableLayer* layer,
}
// Build FlatBuffer for Reshape Layer
-void SerializerVisitor::VisitReshapeLayer(const armnn::IConnectableLayer* layer,
+void SerializerStrategy::SerializeReshapeLayer(const armnn::IConnectableLayer* layer,
const armnn::ReshapeDescriptor& reshapeDescriptor,
const char* name)
{
@@ -948,7 +964,7 @@ void SerializerVisitor::VisitReshapeLayer(const armnn::IConnectableLayer* layer,
CreateAnyLayer(flatBufferReshapeLayer.o, serializer::Layer::Layer_ReshapeLayer);
}
-void SerializerVisitor::VisitResizeBilinearLayer(const armnn::IConnectableLayer* layer,
+void SerializerStrategy::SerializeResizeBilinearLayer(const armnn::IConnectableLayer* layer,
const armnn::ResizeBilinearDescriptor& resizeDescriptor,
const char* name)
{
@@ -971,7 +987,7 @@ void SerializerVisitor::VisitResizeBilinearLayer(const armnn::IConnectableLayer*
CreateAnyLayer(flatBufferLayer.o, serializer::Layer::Layer_ResizeBilinearLayer);
}
-void SerializerVisitor::VisitResizeLayer(const armnn::IConnectableLayer* layer,
+void SerializerStrategy::SerializeResizeLayer(const armnn::IConnectableLayer* layer,
const armnn::ResizeDescriptor& resizeDescriptor,
const char* name)
{
@@ -995,7 +1011,7 @@ void SerializerVisitor::VisitResizeLayer(const armnn::IConnectableLayer* layer,
CreateAnyLayer(flatBufferLayer.o, serializer::Layer::Layer_ResizeLayer);
}
-void SerializerVisitor::VisitRsqrtLayer(const armnn::IConnectableLayer* layer, const char* name)
+void SerializerStrategy::SerializeRsqrtLayer(const armnn::IConnectableLayer* layer, const char* name)
{
IgnoreUnused(name);
@@ -1005,7 +1021,7 @@ void SerializerVisitor::VisitRsqrtLayer(const armnn::IConnectableLayer* layer, c
CreateAnyLayer(fbRsqrtLayer.o, serializer::Layer::Layer_RsqrtLayer);
}
-void SerializerVisitor::VisitSliceLayer(const armnn::IConnectableLayer* layer,
+void SerializerStrategy::SerializeSliceLayer(const armnn::IConnectableLayer* layer,
const armnn::SliceDescriptor& sliceDescriptor,
const char* name)
{
@@ -1022,7 +1038,7 @@ void SerializerVisitor::VisitSliceLayer(const armnn::IConnectableLayer* layer,
}
// Build FlatBuffer for Softmax Layer
-void SerializerVisitor::VisitSoftmaxLayer(const armnn::IConnectableLayer* layer,
+void SerializerStrategy::SerializeSoftmaxLayer(const armnn::IConnectableLayer* layer,
const armnn::SoftmaxDescriptor& softmaxDescriptor,
const char* name)
{
@@ -1044,7 +1060,7 @@ void SerializerVisitor::VisitSoftmaxLayer(const armnn::IConnectableLayer* layer,
CreateAnyLayer(flatBufferSoftmaxLayer.o, serializer::Layer::Layer_SoftmaxLayer);
}
-void SerializerVisitor::VisitPooling2dLayer(const armnn::IConnectableLayer* layer,
+void SerializerStrategy::SerializePooling2dLayer(const armnn::IConnectableLayer* layer,
const armnn::Pooling2dDescriptor& pooling2dDescriptor,
const char* name)
{
@@ -1073,7 +1089,7 @@ void SerializerVisitor::VisitPooling2dLayer(const armnn::IConnectableLayer* laye
CreateAnyLayer(fbPooling2dLayer.o, serializer::Layer::Layer_Pooling2dLayer);
}
-void SerializerVisitor::VisitPreluLayer(const armnn::IConnectableLayer* layer,
+void SerializerStrategy::SerializePreluLayer(const armnn::IConnectableLayer* layer,
const char* name)
{
IgnoreUnused(name);
@@ -1088,7 +1104,7 @@ void SerializerVisitor::VisitPreluLayer(const armnn::IConnectableLayer* layer,
CreateAnyLayer(flatBufferPreluLayer.o, serializer::Layer::Layer_PreluLayer);
}
-void SerializerVisitor::VisitQuantizeLayer(const armnn::IConnectableLayer *layer, const char *name)
+void SerializerStrategy::SerializeQuantizeLayer(const armnn::IConnectableLayer *layer, const char *name)
{
IgnoreUnused(name);
@@ -1099,14 +1115,15 @@ void SerializerVisitor::VisitQuantizeLayer(const armnn::IConnectableLayer *layer
}
// Build FlatBuffer for FullyConnected Layer
-void SerializerVisitor::VisitFullyConnectedLayer(const armnn::IConnectableLayer* layer,
- const armnn::FullyConnectedDescriptor& fullyConnectedDescriptor,
- const armnn::ConstTensor& weights,
- const armnn::Optional<armnn::ConstTensor>& biases,
- const char* name)
+void SerializerStrategy::SerializeFullyConnectedLayer(const armnn::IConnectableLayer* layer,
+ const armnn::FullyConnectedDescriptor& fullyConnectedDescriptor,
+ const std::vector<armnn::ConstTensor>& constants,
+ const char* name)
{
IgnoreUnused(name);
+ const armnn::ConstTensor& weights = constants.at(0);
+
// Create FlatBuffer BaseLayer
auto flatBufferBaseLayer = CreateLayerBase(layer, serializer::LayerType::LayerType_FullyConnected);
@@ -1123,7 +1140,8 @@ void SerializerVisitor::VisitFullyConnectedLayer(const armnn::IConnectableLayer*
flatbuffers::Offset<serializer::ConstTensor> flatBufferBiases;
if (fullyConnectedDescriptor.m_BiasEnabled)
{
- flatBufferBiases = CreateConstTensorInfo(biases.value());
+ armnn::ConstTensor biases = constants.at(1);
+ flatBufferBiases = CreateConstTensorInfo(biases);
}
// Create FlatBuffer FullyConnectedLayer
@@ -1138,7 +1156,7 @@ void SerializerVisitor::VisitFullyConnectedLayer(const armnn::IConnectableLayer*
}
// Build FlatBuffer for SpaceToBatchNd Layer
-void SerializerVisitor::VisitSpaceToBatchNdLayer(const armnn::IConnectableLayer* layer,
+void SerializerStrategy::SerializeSpaceToBatchNdLayer(const armnn::IConnectableLayer* layer,
const armnn::SpaceToBatchNdDescriptor& spaceToBatchNdDescriptor,
const char* name)
{
@@ -1169,7 +1187,7 @@ void SerializerVisitor::VisitSpaceToBatchNdLayer(const armnn::IConnectableLayer*
}
// Build FlatBuffer for SpaceToDepthLayer
-void SerializerVisitor::VisitSpaceToDepthLayer(const armnn::IConnectableLayer* layer,
+void SerializerStrategy::SerializeSpaceToDepthLayer(const armnn::IConnectableLayer* layer,
const armnn::SpaceToDepthDescriptor& spaceToDepthDescriptor,
const char* name)
{
@@ -1189,7 +1207,7 @@ void SerializerVisitor::VisitSpaceToDepthLayer(const armnn::IConnectableLayer* l
}
// Build FlatBuffer for Splitter Layer
-void SerializerVisitor::VisitSplitterLayer(const armnn::IConnectableLayer* layer,
+void SerializerStrategy::SerializeSplitterLayer(const armnn::IConnectableLayer* layer,
const armnn::ViewsDescriptor& viewsDescriptor,
const char* name)
{
@@ -1255,7 +1273,7 @@ void SerializerVisitor::VisitSplitterLayer(const armnn::IConnectableLayer* layer
CreateAnyLayer(flatBufferSplitterLayer.o, serializer::Layer::Layer_SplitterLayer);
}
-void SerializerVisitor::VisitNormalizationLayer(const armnn::IConnectableLayer* layer,
+void SerializerStrategy::SerializeNormalizationLayer(const armnn::IConnectableLayer* layer,
const armnn::NormalizationDescriptor& descriptor,
const char* name)
{
@@ -1280,7 +1298,7 @@ void SerializerVisitor::VisitNormalizationLayer(const armnn::IConnectableLayer*
CreateAnyLayer(flatBufferLayer.o, serializer::Layer::Layer_NormalizationLayer);
}
-void SerializerVisitor::VisitStackLayer(const armnn::IConnectableLayer* layer,
+void SerializerStrategy::SerializeStackLayer(const armnn::IConnectableLayer* layer,
const armnn::StackDescriptor& stackDescriptor,
const char* name)
{
@@ -1303,7 +1321,7 @@ void SerializerVisitor::VisitStackLayer(const armnn::IConnectableLayer* layer,
CreateAnyLayer(stackLayer.o, serializer::Layer::Layer_StackLayer);
}
-void SerializerVisitor::VisitStandInLayer(const armnn::IConnectableLayer *layer,
+void SerializerStrategy::SerializeStandInLayer(const armnn::IConnectableLayer *layer,
const armnn::StandInDescriptor& standInDescriptor,
const char *name)
{
@@ -1319,7 +1337,7 @@ void SerializerVisitor::VisitStandInLayer(const armnn::IConnectableLayer *layer,
CreateAnyLayer(fbLayer.o, serializer::Layer::Layer_StandInLayer);
}
-void SerializerVisitor::VisitStridedSliceLayer(const armnn::IConnectableLayer* layer,
+void SerializerStrategy::SerializeStridedSliceLayer(const armnn::IConnectableLayer* layer,
const armnn::StridedSliceDescriptor& stridedSliceDescriptor,
const char* name)
{
@@ -1346,7 +1364,7 @@ void SerializerVisitor::VisitStridedSliceLayer(const armnn::IConnectableLayer* l
CreateAnyLayer(flatBufferLayer.o, serializer::Layer::Layer_StridedSliceLayer);
}
-void SerializerVisitor::VisitSubtractionLayer(const armnn::IConnectableLayer* layer, const char* name)
+void SerializerStrategy::SerializeSubtractionLayer(const armnn::IConnectableLayer* layer, const char* name)
{
IgnoreUnused(name);
@@ -1356,7 +1374,7 @@ void SerializerVisitor::VisitSubtractionLayer(const armnn::IConnectableLayer* la
CreateAnyLayer(fbSubtractionLayer.o, serializer::Layer::Layer_SubtractionLayer);
}
-void SerializerVisitor::VisitSwitchLayer(const armnn::IConnectableLayer* layer, const char* name)
+void SerializerStrategy::SerializeSwitchLayer(const armnn::IConnectableLayer* layer, const char* name)
{
IgnoreUnused(name);
@@ -1366,15 +1384,16 @@ void SerializerVisitor::VisitSwitchLayer(const armnn::IConnectableLayer* layer,
CreateAnyLayer(fbSwitchLayer.o, serializer::Layer::Layer_SwitchLayer);
}
-void SerializerVisitor::VisitTransposeConvolution2dLayer(
+void SerializerStrategy::SerializeTransposeConvolution2dLayer(
const armnn::IConnectableLayer* layer,
const armnn::TransposeConvolution2dDescriptor& descriptor,
- const armnn::ConstTensor& weights,
- const armnn::Optional<armnn::ConstTensor>& biases,
+ const std::vector<armnn::ConstTensor>& constants,
const char* name)
{
IgnoreUnused(name);
+ const armnn::ConstTensor& weights = constants.at(0);
+
auto fbBaseLayer = CreateLayerBase(layer, serializer::LayerType::LayerType_Convolution2d);
auto fbDescriptor = CreateTransposeConvolution2dDescriptor(m_flatBufferBuilder,
descriptor.m_PadLeft,
@@ -1389,9 +1408,10 @@ void SerializerVisitor::VisitTransposeConvolution2dLayer(
// weights & biases
auto fbWeightsConstTensorInfo = CreateConstTensorInfo(weights);
flatbuffers::Offset<serializer::ConstTensor> fbBiasesConstTensorInfo;
- if (biases.has_value())
+ if (constants.size() > 1)
{
- fbBiasesConstTensorInfo = CreateConstTensorInfo(biases.value());
+ const armnn::ConstTensor& biases = constants.at(1);
+ fbBiasesConstTensorInfo = CreateConstTensorInfo(biases);
}
auto fbLayer = CreateTransposeConvolution2dLayer(m_flatBufferBuilder,
@@ -1403,7 +1423,7 @@ void SerializerVisitor::VisitTransposeConvolution2dLayer(
CreateAnyLayer(fbLayer.o, serializer::Layer::Layer_TransposeConvolution2dLayer);
}
-void SerializerVisitor::VisitTransposeLayer(const armnn::IConnectableLayer* layer,
+void SerializerStrategy::SerializeTransposeLayer(const armnn::IConnectableLayer* layer,
const armnn::TransposeDescriptor& descriptor,
const char* name)
{
@@ -1430,10 +1450,10 @@ void SerializerVisitor::VisitTransposeLayer(const armnn::IConnectableLayer* laye
CreateAnyLayer(flatBufferLayer.o, serializer::Layer::Layer_TransposeLayer);
}
-void SerializerVisitor::VisitQLstmLayer(const armnn::IConnectableLayer* layer,
- const armnn::QLstmDescriptor& descriptor,
- const armnn::LstmInputParams& params,
- const char* name)
+void SerializerStrategy::SerializeQLstmLayer(const armnn::IConnectableLayer* layer,
+ const armnn::QLstmDescriptor& descriptor,
+ const std::vector<armnn::ConstTensor>& constants,
+ const char* name)
{
IgnoreUnused(name);
@@ -1455,16 +1475,19 @@ void SerializerVisitor::VisitQLstmLayer(const armnn::IConnectableLayer* layer,
descriptor.m_HiddenStateScale
);
+ // Index for constants vector
+ std::size_t i = 0;
+
// Mandatory params
- auto inputToForgetWeights = CreateConstTensorInfo(*params.m_InputToForgetWeights);
- auto inputToCellWeights = CreateConstTensorInfo(*params.m_InputToCellWeights);
- auto inputToOutputWeights = CreateConstTensorInfo(*params.m_InputToOutputWeights);
- auto recurrentToForgetWeights = CreateConstTensorInfo(*params.m_RecurrentToForgetWeights);
- auto recurrentToCellWeights = CreateConstTensorInfo(*params.m_RecurrentToCellWeights);
- auto recurrentToOutputWeights = CreateConstTensorInfo(*params.m_RecurrentToOutputWeights);
- auto forgetGateBias = CreateConstTensorInfo(*params.m_ForgetGateBias);
- auto cellBias = CreateConstTensorInfo(*params.m_CellBias);
- auto outputGateBias = CreateConstTensorInfo(*params.m_OutputGateBias);
+ auto inputToForgetWeights = CreateConstTensorInfo(constants[i++]); //InputToForgetWeights
+ auto inputToCellWeights = CreateConstTensorInfo(constants[i++]); //InputToCellWeights
+ auto inputToOutputWeights = CreateConstTensorInfo(constants[i++]); //InputToOutputWeights
+ auto recurrentToForgetWeights = CreateConstTensorInfo(constants[i++]); //RecurrentToForgetWeights
+ auto recurrentToCellWeights = CreateConstTensorInfo(constants[i++]); //RecurrentToCellWeights
+ auto recurrentToOutputWeights = CreateConstTensorInfo(constants[i++]); //RecurrentToOutputWeights
+ auto forgetGateBias = CreateConstTensorInfo(constants[i++]); //ForgetGateBias
+ auto cellBias = CreateConstTensorInfo(constants[i++]); //CellBias
+ auto outputGateBias = CreateConstTensorInfo(constants[i++]); //OutputGateBias
// CIFG
flatbuffers::Offset<serializer::ConstTensor> inputToInputWeights;
@@ -1473,19 +1496,9 @@ void SerializerVisitor::VisitQLstmLayer(const armnn::IConnectableLayer* layer,
if (!descriptor.m_CifgEnabled)
{
- inputToInputWeights = CreateConstTensorInfo(*params.m_InputToInputWeights);
- recurrentToInputWeights = CreateConstTensorInfo(*params.m_RecurrentToInputWeights);
- inputGateBias = CreateConstTensorInfo(*params.m_InputGateBias);
- }
-
- // Projectiom
- flatbuffers::Offset<serializer::ConstTensor> projectionWeights;
- flatbuffers::Offset<serializer::ConstTensor> projectionBias;
-
- if (descriptor.m_ProjectionEnabled)
- {
- projectionWeights = CreateConstTensorInfo(*params.m_ProjectionWeights);
- projectionBias = CreateConstTensorInfo(*params.m_ProjectionBias);
+ inputToInputWeights = CreateConstTensorInfo(constants[i++]); //InputToInputWeights
+ recurrentToInputWeights = CreateConstTensorInfo(constants[i++]); //RecurrentToInputWeights
+ inputGateBias = CreateConstTensorInfo(constants[i++]); //InputGateBias
}
// Peephole
@@ -1497,11 +1510,20 @@ void SerializerVisitor::VisitQLstmLayer(const armnn::IConnectableLayer* layer,
{
if (!descriptor.m_CifgEnabled)
{
- cellToInputWeights = CreateConstTensorInfo(*params.m_CellToInputWeights);
+ cellToInputWeights = CreateConstTensorInfo(constants[i++]); //CellToInputWeights
}
+ cellToForgetWeights = CreateConstTensorInfo(constants[i++]); //CellToForgetWeights
+ cellToOutputWeights = CreateConstTensorInfo(constants[i++]); //CellToOutputWeights
+ }
- cellToForgetWeights = CreateConstTensorInfo(*params.m_CellToForgetWeights);
- cellToOutputWeights = CreateConstTensorInfo(*params.m_CellToOutputWeights);
+ // Projection
+ flatbuffers::Offset<serializer::ConstTensor> projectionWeights;
+ flatbuffers::Offset<serializer::ConstTensor> projectionBias;
+
+ if (descriptor.m_ProjectionEnabled)
+ {
+ projectionWeights = CreateConstTensorInfo(constants[i++]); //ProjectionWeights
+ projectionBias = CreateConstTensorInfo(constants[i++]); //ProjectionBias
}
// Layer norm
@@ -1514,12 +1536,11 @@ void SerializerVisitor::VisitQLstmLayer(const armnn::IConnectableLayer* layer,
{
if (!descriptor.m_CifgEnabled)
{
- inputLayerNormWeights = CreateConstTensorInfo((*params.m_InputLayerNormWeights));
+ inputLayerNormWeights = CreateConstTensorInfo(constants[i++]); //InputLayerNormWeights
}
-
- forgetLayerNormWeights = CreateConstTensorInfo(*params.m_ForgetLayerNormWeights);
- cellLayerNormWeights = CreateConstTensorInfo(*params.m_CellLayerNormWeights);
- outputLayerNormWeights = CreateConstTensorInfo(*params.m_OutputLayerNormWeights);
+ forgetLayerNormWeights = CreateConstTensorInfo(constants[i++]); //ForgetLayerNormWeights
+ cellLayerNormWeights = CreateConstTensorInfo(constants[i++]); //CellLayerNormWeights
+ outputLayerNormWeights = CreateConstTensorInfo(constants[i++]); //OutputLayerNormWeights
}
auto fbQLstmParams = serializer::CreateQLstmInputParams(
@@ -1555,29 +1576,32 @@ void SerializerVisitor::VisitQLstmLayer(const armnn::IConnectableLayer* layer,
CreateAnyLayer(fbQLstmLayer.o, serializer::Layer::Layer_QLstmLayer);
}
-void SerializerVisitor::VisitQuantizedLstmLayer(const armnn::IConnectableLayer* layer,
- const armnn::QuantizedLstmInputParams& params,
- const char* name)
+void SerializerStrategy::SerializeQuantizedLstmLayer(const armnn::IConnectableLayer* layer,
+ const std::vector<armnn::ConstTensor>& constants,
+ const char* name)
{
IgnoreUnused(name);
auto fbQuantizedLstmBaseLayer = CreateLayerBase(layer, serializer::LayerType::LayerType_QuantizedLstm);
+ // index for constants vector
+ size_t i = 0;
+
// Get input parameters
- auto inputToInputWeights = CreateConstTensorInfo(params.GetInputToInputWeights());
- auto inputToForgetWeights = CreateConstTensorInfo(params.GetInputToForgetWeights());
- auto inputToCellWeights = CreateConstTensorInfo(params.GetInputToCellWeights());
- auto inputToOutputWeights = CreateConstTensorInfo(params.GetInputToOutputWeights());
+ auto inputToInputWeights = CreateConstTensorInfo(constants[i++]);
+ auto inputToForgetWeights = CreateConstTensorInfo(constants[i++]);
+ auto inputToCellWeights = CreateConstTensorInfo(constants[i++]);
+ auto inputToOutputWeights = CreateConstTensorInfo(constants[i++]);
- auto recurrentToInputWeights = CreateConstTensorInfo(params.GetRecurrentToInputWeights());
- auto recurrentToForgetWeights = CreateConstTensorInfo(params.GetRecurrentToForgetWeights());
- auto recurrentToCellWeights = CreateConstTensorInfo(params.GetRecurrentToCellWeights());
- auto recurrentToOutputWeights = CreateConstTensorInfo(params.GetRecurrentToOutputWeights());
+ auto recurrentToInputWeights = CreateConstTensorInfo(constants[i++]);
+ auto recurrentToForgetWeights = CreateConstTensorInfo(constants[i++]);
+ auto recurrentToCellWeights = CreateConstTensorInfo(constants[i++]);
+ auto recurrentToOutputWeights = CreateConstTensorInfo(constants[i++]);
- auto inputGateBias = CreateConstTensorInfo(params.GetInputGateBias());
- auto forgetGateBias = CreateConstTensorInfo(params.GetForgetGateBias());
- auto cellBias = CreateConstTensorInfo(params.GetCellBias());
- auto outputGateBias = CreateConstTensorInfo(params.GetOutputGateBias());
+ auto inputGateBias = CreateConstTensorInfo(constants[i++]);
+ auto forgetGateBias = CreateConstTensorInfo(constants[i++]);
+ auto cellBias = CreateConstTensorInfo(constants[i++]);
+ auto outputGateBias = CreateConstTensorInfo(constants[i++]);
auto fbQuantizedLstmParams = serializer::CreateQuantizedLstmInputParams(
m_flatBufferBuilder,
@@ -1602,7 +1626,7 @@ void SerializerVisitor::VisitQuantizedLstmLayer(const armnn::IConnectableLayer*
CreateAnyLayer(fbQuantizedLstmLayer.o, serializer::Layer::Layer_QuantizedLstmLayer);
}
-fb::Offset<serializer::LayerBase> SerializerVisitor::CreateLayerBase(const IConnectableLayer* layer,
+fb::Offset<serializer::LayerBase> SerializerStrategy::CreateLayerBase(const IConnectableLayer* layer,
const serializer::LayerType layerType)
{
@@ -1619,7 +1643,7 @@ fb::Offset<serializer::LayerBase> SerializerVisitor::CreateLayerBase(const IConn
m_flatBufferBuilder.CreateVector(outputSlots));
}
-void SerializerVisitor::CreateAnyLayer(const flatbuffers::Offset<void>& layer, const serializer::Layer serializerLayer)
+void SerializerStrategy::CreateAnyLayer(const flatbuffers::Offset<void>& layer, const serializer::Layer serializerLayer)
{
auto anyLayer = armnnSerializer::CreateAnyLayer(m_flatBufferBuilder, serializerLayer, layer);
@@ -1627,7 +1651,7 @@ void SerializerVisitor::CreateAnyLayer(const flatbuffers::Offset<void>& layer, c
}
template <typename T>
-flatbuffers::Offset<flatbuffers::Vector<T>> SerializerVisitor::CreateDataVector(const void* memory, unsigned int size)
+flatbuffers::Offset<flatbuffers::Vector<T>> SerializerStrategy::CreateDataVector(const void* memory, unsigned int size)
{
const T* buffer = reinterpret_cast<const T*>(memory);
std::vector<T> vector(buffer, buffer + (size / sizeof(T)));
@@ -1635,7 +1659,7 @@ flatbuffers::Offset<flatbuffers::Vector<T>> SerializerVisitor::CreateDataVector(
return fbVector;
}
-flatbuffers::Offset<TensorInfo> SerializerVisitor::CreateTensorInfo(const armnn::TensorInfo& tensorInfo)
+flatbuffers::Offset<TensorInfo> SerializerStrategy::CreateTensorInfo(const armnn::TensorInfo& tensorInfo)
{
// Get the dimensions
std::vector<unsigned int> shape;
@@ -1674,7 +1698,7 @@ flatbuffers::Offset<TensorInfo> SerializerVisitor::CreateTensorInfo(const armnn
}
flatbuffers::Offset<serializer::ConstTensor>
- SerializerVisitor::CreateConstTensorInfo(const armnn::ConstTensor& constTensor)
+ SerializerStrategy::CreateConstTensorInfo(const armnn::ConstTensor& constTensor)
{
armnn::TensorInfo tensorInfo = constTensor.GetInfo();
@@ -1724,7 +1748,7 @@ flatbuffers::Offset<serializer::ConstTensor>
return flatBufferConstTensor;
}
-flatbuffers::Offset<armnnSerializer::FeatureCompatibilityVersions> SerializerVisitor::GetVersionTable()
+flatbuffers::Offset<armnnSerializer::FeatureCompatibilityVersions> SerializerStrategy::GetVersionTable()
{
flatbuffers::Offset<armnnSerializer::FeatureCompatibilityVersions> versionsTable =
serializer::CreateFeatureCompatibilityVersions(
@@ -1735,7 +1759,7 @@ flatbuffers::Offset<armnnSerializer::FeatureCompatibilityVersions> SerializerVis
}
std::vector<fb::Offset<serializer::InputSlot>>
- SerializerVisitor::CreateInputSlots(const armnn::IConnectableLayer* layer)
+ SerializerStrategy::CreateInputSlots(const armnn::IConnectableLayer* layer)
{
std::vector<fb::Offset<serializer::InputSlot>> inputSlots;
@@ -1757,7 +1781,7 @@ std::vector<fb::Offset<serializer::InputSlot>>
}
std::vector<fb::Offset<serializer::OutputSlot>>
- SerializerVisitor::CreateOutputSlots(const armnn::IConnectableLayer* layer)
+ SerializerStrategy::CreateOutputSlots(const armnn::IConnectableLayer* layer)
{
std::vector<fb::Offset<serializer::OutputSlot>> outputSlots;
@@ -1775,32 +1799,421 @@ std::vector<fb::Offset<serializer::OutputSlot>>
return outputSlots;
}
+void SerializerStrategy::ExecuteStrategy(const armnn::IConnectableLayer* layer,
+ const BaseDescriptor& descriptor,
+ const std::vector<armnn::ConstTensor>& constants,
+ const char* name,
+ const armnn::LayerBindingId id)
+{
+ IgnoreUnused(constants);
+
+ switch (layer->GetType())
+ {
+ case armnn::LayerType::Activation :
+ {
+ const armnn::ActivationDescriptor& layerDescriptor =
+ static_cast<const armnn::ActivationDescriptor&>(descriptor);
+ SerializeActivationLayer(layer, layerDescriptor, name);
+ break;
+ }
+ case armnn::LayerType::Addition :
+ {
+ SerializeAdditionLayer(layer, name);
+ break;
+ }
+ case armnn::LayerType::ArgMinMax :
+ {
+ const armnn::ArgMinMaxDescriptor& layerDescriptor =
+ static_cast<const armnn::ArgMinMaxDescriptor&>(descriptor);
+ SerializeArgMinMaxLayer(layer, layerDescriptor, name);
+ break;
+ }
+ case armnn::LayerType::BatchNormalization :
+ {
+ const armnn::BatchNormalizationDescriptor& layerDescriptor =
+ static_cast<const armnn::BatchNormalizationDescriptor&>(descriptor);
+ SerializeBatchNormalizationLayer(layer,
+ layerDescriptor,
+ constants,
+ name);
+ break;
+ }
+ case armnn::LayerType::BatchToSpaceNd :
+ {
+ const armnn::BatchToSpaceNdDescriptor& layerDescriptor =
+ static_cast<const armnn::BatchToSpaceNdDescriptor&>(descriptor);
+ SerializeBatchToSpaceNdLayer(layer,
+ layerDescriptor,
+ name);
+ break;
+ }
+ case armnn::LayerType::Comparison :
+ {
+ const armnn::ComparisonDescriptor& layerDescriptor =
+ static_cast<const armnn::ComparisonDescriptor&>(descriptor);
+ SerializeComparisonLayer(layer,
+ layerDescriptor,
+ name);
+ break;
+ }
+ case armnn::LayerType::Concat :
+ {
+ const armnn::ConcatDescriptor& layerDescriptor =
+ static_cast<const armnn::ConcatDescriptor&>(descriptor);
+ SerializeConcatLayer(layer,
+ layerDescriptor,
+ name);
+ break;
+ }
+ case armnn::LayerType::Constant :
+ {
+ SerializeConstantLayer(layer,
+ constants,
+ name);
+ break;
+ }
+ case armnn::LayerType::Convolution2d :
+ {
+ const armnn::Convolution2dDescriptor& layerDescriptor =
+ static_cast<const armnn::Convolution2dDescriptor&>(descriptor);
+ SerializeConvolution2dLayer(layer,
+ layerDescriptor,
+ constants,
+ name);
+ break;
+ }
+ case armnn::LayerType::DepthToSpace :
+ {
+ const armnn::DepthToSpaceDescriptor& layerDescriptor =
+ static_cast<const armnn::DepthToSpaceDescriptor&>(descriptor);
+ SerializeDepthToSpaceLayer(layer,
+ layerDescriptor,
+ name);
+ break;
+ }
+ case armnn::LayerType::DepthwiseConvolution2d :
+ {
+ const armnn::DepthwiseConvolution2dDescriptor& layerDescriptor =
+ static_cast<const armnn::DepthwiseConvolution2dDescriptor&>(descriptor);
+ SerializeDepthwiseConvolution2dLayer(layer,
+ layerDescriptor,
+ constants,
+ name);
+ break;
+ }
+ case armnn::LayerType::Dequantize :
+ {
+ SerializeDequantizeLayer(layer,
+ name);
+ break;
+ }
+ case armnn::LayerType::DetectionPostProcess :
+ {
+ const armnn::DetectionPostProcessDescriptor& layerDescriptor =
+ static_cast<const armnn::DetectionPostProcessDescriptor&>(descriptor);
+ SerializeDetectionPostProcessLayer(layer, layerDescriptor, constants, name);
+ break;
+ }
+ case armnn::LayerType::Division :
+ {
+ SerializeDivisionLayer(layer, name);
+ break;
+ }
+ case armnn::LayerType::ElementwiseUnary :
+ {
+ const armnn::ElementwiseUnaryDescriptor& layerDescriptor =
+ static_cast<const armnn::ElementwiseUnaryDescriptor&>(descriptor);
+ SerializeElementwiseUnaryLayer(layer, layerDescriptor, name);
+ break;
+ }
+ case armnn::LayerType::Fill :
+ {
+ const armnn::FillDescriptor& layerDescriptor =
+ static_cast<const armnn::FillDescriptor&>(descriptor);
+ SerializeFillLayer(layer, layerDescriptor, name);
+ break;
+ }
+ case armnn::LayerType::Floor :
+ {
+ SerializeFloorLayer(layer, name);
+ break;
+ }
+ case armnn::LayerType::FullyConnected :
+ {
+ const armnn::FullyConnectedDescriptor& layerDescriptor =
+ static_cast<const armnn::FullyConnectedDescriptor&>(descriptor);
+ SerializeFullyConnectedLayer(layer, layerDescriptor, constants, name);
+ break;
+ }
+ case armnn::LayerType::Gather :
+ {
+ const armnn::GatherDescriptor& layerDescriptor =
+ static_cast<const armnn::GatherDescriptor&>(descriptor);
+ SerializeGatherLayer(layer, layerDescriptor, name);
+ break;
+ }
+ case armnn::LayerType::Input:
+ {
+ SerializeInputLayer(layer, id, name);
+ break;
+ }
+ case armnn::LayerType::InstanceNormalization :
+ {
+ const armnn::InstanceNormalizationDescriptor& layerDescriptor =
+ static_cast<const armnn::InstanceNormalizationDescriptor&>(descriptor);
+ SerializeInstanceNormalizationLayer(layer, layerDescriptor, name);
+ break;
+ }
+ case armnn::LayerType::L2Normalization :
+ {
+ const armnn::L2NormalizationDescriptor& layerDescriptor =
+ static_cast<const armnn::L2NormalizationDescriptor&>(descriptor);
+ SerializeL2NormalizationLayer(layer, layerDescriptor, name);
+ break;
+ }
+ case armnn::LayerType::LogicalBinary :
+ {
+ const armnn::LogicalBinaryDescriptor& layerDescriptor =
+ static_cast<const armnn::LogicalBinaryDescriptor&>(descriptor);
+ SerializeLogicalBinaryLayer(layer, layerDescriptor, name);
+ break;
+ }
+ case armnn::LayerType::LogSoftmax :
+ {
+ const armnn::LogSoftmaxDescriptor& layerDescriptor =
+ static_cast<const armnn::LogSoftmaxDescriptor&>(descriptor);
+ SerializeLogSoftmaxLayer(layer, layerDescriptor, name);
+ break;
+ }
+ case armnn::LayerType::Lstm :
+ {
+ const armnn::LstmDescriptor& layerDescriptor =
+ static_cast<const armnn::LstmDescriptor&>(descriptor);
+ SerializeLstmLayer(layer, layerDescriptor, constants, name);
+ break;
+ }
+ case armnn::LayerType::QLstm :
+ {
+ const armnn::QLstmDescriptor& layerDescriptor =
+ static_cast<const armnn::QLstmDescriptor&>(descriptor);
+ SerializeQLstmLayer(layer, layerDescriptor, constants, name);
+ break;
+ }
+ case armnn::LayerType::Maximum :
+ {
+ SerializeMaximumLayer(layer, name);
+ break;
+ }
+ case armnn::LayerType::Mean :
+ {
+ const armnn::MeanDescriptor& layerDescriptor =
+ static_cast<const armnn::MeanDescriptor&>(descriptor);
+ SerializeMeanLayer(layer, layerDescriptor, name);
+ break;
+ }
+ case armnn::LayerType::Merge :
+ {
+ SerializeMergeLayer(layer, name);
+ break;
+ }
+ case armnn::LayerType::Minimum :
+ {
+ SerializeMinimumLayer(layer, name);
+ break;
+ }
+ case armnn::LayerType::Multiplication :
+ {
+ SerializeMultiplicationLayer(layer, name);
+ break;
+ }
+ case armnn::LayerType::Normalization :
+ {
+ const armnn::NormalizationDescriptor& layerDescriptor =
+ static_cast<const armnn::NormalizationDescriptor&>(descriptor);
+ SerializeNormalizationLayer(layer, layerDescriptor, name);
+ break;
+ }
+ case armnn::LayerType::Output:
+ {
+ SerializeOutputLayer(layer, id, name);
+ break;
+ }
+ case armnn::LayerType::Pad :
+ {
+ const armnn::PadDescriptor& layerDescriptor =
+ static_cast<const armnn::PadDescriptor&>(descriptor);
+ SerializePadLayer(layer, layerDescriptor, name);
+ break;
+ }
+ case armnn::LayerType::Permute :
+ {
+ const armnn::PermuteDescriptor& layerDescriptor =
+ static_cast<const armnn::PermuteDescriptor&>(descriptor);
+ SerializePermuteLayer(layer, layerDescriptor, name);
+ break;
+ }
+ case armnn::LayerType::Pooling2d :
+ {
+ const armnn::Pooling2dDescriptor& layerDescriptor =
+ static_cast<const armnn::Pooling2dDescriptor&>(descriptor);
+ SerializePooling2dLayer(layer, layerDescriptor, name);
+ break;
+ }
+ case armnn::LayerType::Prelu :
+ {
+ SerializePreluLayer(layer, name);
+ break;
+ }
+ case armnn::LayerType::Quantize :
+ {
+ SerializeQuantizeLayer(layer, name);
+ break;
+ }
+ case armnn::LayerType::QuantizedLstm:
+ SerializeQuantizedLstmLayer(layer, constants, name);
+ break;
+ case armnn::LayerType::Reshape:
+ {
+ const armnn::ReshapeDescriptor &layerDescriptor =
+ static_cast<const armnn::ReshapeDescriptor &>(descriptor);
+ SerializeReshapeLayer(layer, layerDescriptor, name);
+ break;
+ }
+ case armnn::LayerType::Rank:
+ {
+ SerializeRankLayer(layer, name);
+ break;
+ }
+ case armnn::LayerType::Reduce:
+ {
+ const armnn::ReduceDescriptor& layerDescriptor =
+ static_cast<const armnn::ReduceDescriptor&>(descriptor);
+ SerializeReduceLayer(layer, layerDescriptor, name);
+ break;
+ }
+ case armnn::LayerType::Resize:
+ {
+ const armnn::ResizeDescriptor& layerDescriptor =
+ static_cast<const armnn::ResizeDescriptor&>(descriptor);
+ SerializeResizeLayer(layer, layerDescriptor, name);
+ break;
+ }
+ case armnn::LayerType::Slice:
+ {
+ const armnn::SliceDescriptor& layerDescriptor =
+ static_cast<const armnn::SliceDescriptor&>(descriptor);
+ SerializeSliceLayer(layer, layerDescriptor, name);
+ break;
+ }
+ case armnn::LayerType::Softmax:
+ {
+ const armnn::SoftmaxDescriptor& layerDescriptor =
+ static_cast<const armnn::SoftmaxDescriptor&>(descriptor);
+ SerializeSoftmaxLayer(layer, layerDescriptor, name);
+ break;
+ }
+ case armnn::LayerType::SpaceToBatchNd:
+ {
+ const armnn::SpaceToBatchNdDescriptor& layerDescriptor =
+ static_cast<const armnn::SpaceToBatchNdDescriptor&>(descriptor);
+ SerializeSpaceToBatchNdLayer(layer, layerDescriptor, name);
+ break;
+ }
+ case armnn::LayerType::SpaceToDepth:
+ {
+ const armnn::SpaceToDepthDescriptor& layerDescriptor =
+ static_cast<const armnn::SpaceToDepthDescriptor&>(descriptor);
+ SerializeSpaceToDepthLayer(layer, layerDescriptor, name);
+ break;
+ }
+ case armnn::LayerType::Splitter:
+ {
+ const armnn::SplitterDescriptor& layerDescriptor =
+ static_cast<const armnn::SplitterDescriptor&>(descriptor);
+ SerializeSplitterLayer(layer, layerDescriptor, name);
+ break;
+ }
+ case armnn::LayerType::Stack:
+ {
+ const armnn::StackDescriptor& layerDescriptor =
+ static_cast<const armnn::StackDescriptor&>(descriptor);
+ SerializeStackLayer(layer, layerDescriptor, name);
+ break;
+ }
+ case armnn::LayerType::StandIn:
+ {
+ const armnn::StandInDescriptor& layerDescriptor =
+ static_cast<const armnn::StandInDescriptor&>(descriptor);
+ SerializeStandInLayer(layer, layerDescriptor, name);
+ break;
+ }
+ case armnn::LayerType::StridedSlice:
+ {
+ const armnn::StridedSliceDescriptor& layerDescriptor =
+ static_cast<const armnn::StridedSliceDescriptor&>(descriptor);
+ SerializeStridedSliceLayer(layer, layerDescriptor, name);
+ break;
+ }
+ case armnn::LayerType::Subtraction:
+ {
+ SerializeSubtractionLayer(layer, name);
+ break;
+ }
+ case armnn::LayerType::Switch:
+ {
+ SerializeSwitchLayer(layer, name);
+ break;
+ }
+ case armnn::LayerType::Transpose:
+ {
+ const armnn::TransposeDescriptor& layerDescriptor =
+ static_cast<const armnn::TransposeDescriptor&>(descriptor);
+ SerializeTransposeLayer(layer, layerDescriptor, name);
+ break;
+ }
+ case armnn::LayerType::TransposeConvolution2d:
+ {
+ const armnn::TransposeConvolution2dDescriptor& layerDescriptor =
+ static_cast<const armnn::TransposeConvolution2dDescriptor&>(descriptor);
+ SerializeTransposeConvolution2dLayer(layer, layerDescriptor, constants, name);
+ break;
+ }
+ default:
+ {
+ throw InvalidArgumentException(
+ fmt::format("A layer of unknown type was given to the serializer. Layer name: {}; Layer Id: {}",
+ layer->GetName(),
+ id));
+ }
+ }
+}
+
void ISerializer::SerializerImpl::Serialize(const INetwork& inNetwork)
{
// Iterate through to network
- inNetwork.Accept(m_SerializerVisitor);
- flatbuffers::FlatBufferBuilder& fbBuilder = m_SerializerVisitor.GetFlatBufferBuilder();
+ inNetwork.ExecuteStrategy(m_SerializerStrategy);
+ flatbuffers::FlatBufferBuilder& fbBuilder = m_SerializerStrategy.GetFlatBufferBuilder();
// Create FlatBuffer SerializedGraph
auto serializedGraph = serializer::CreateSerializedGraph(
- fbBuilder,
- fbBuilder.CreateVector(m_SerializerVisitor.GetSerializedLayers()),
- fbBuilder.CreateVector(m_SerializerVisitor.GetInputIds()),
- fbBuilder.CreateVector(m_SerializerVisitor.GetOutputIds()),
- m_SerializerVisitor.GetVersionTable());
+ fbBuilder,
+ fbBuilder.CreateVector(m_SerializerStrategy.GetSerializedLayers()),
+ fbBuilder.CreateVector(m_SerializerStrategy.GetInputIds()),
+ fbBuilder.CreateVector(m_SerializerStrategy.GetOutputIds()),
+ m_SerializerStrategy.GetVersionTable());
// Serialize the graph
fbBuilder.Finish(serializedGraph);
}
+
bool ISerializer::SerializerImpl::SaveSerializedToStream(std::ostream& stream)
{
- flatbuffers::FlatBufferBuilder& fbBuilder = m_SerializerVisitor.GetFlatBufferBuilder();
+ flatbuffers::FlatBufferBuilder& fbBuilder = m_SerializerStrategy.GetFlatBufferBuilder();
auto bytesToWrite = armnn::numeric_cast<std::streamsize>(fbBuilder.GetSize());
stream.write(reinterpret_cast<const char*>(fbBuilder.GetBufferPointer()), bytesToWrite);
return !stream.bad();
}
-
} // namespace armnnSerializer
diff --git a/src/armnnSerializer/Serializer.hpp b/src/armnnSerializer/Serializer.hpp
index 10971fddc8..7226006cea 100644
--- a/src/armnnSerializer/Serializer.hpp
+++ b/src/armnnSerializer/Serializer.hpp
@@ -5,6 +5,7 @@
#pragma once
#include <armnn/ILayerVisitor.hpp>
+#include <armnn/IStrategy.hpp>
#include <armnn/LayerVisitorBase.hpp>
#include <armnnSerializer/ISerializer.hpp>
@@ -18,11 +19,17 @@
namespace armnnSerializer
{
-class SerializerVisitor : public armnn::ILayerVisitor
+class SerializerStrategy : public armnn::IStrategy
{
public:
- SerializerVisitor() : m_layerId(0) {}
- ~SerializerVisitor() {}
+ void ExecuteStrategy(const armnn::IConnectableLayer* layer,
+ const armnn::BaseDescriptor& descriptor,
+ const std::vector<armnn::ConstTensor>& constants,
+ const char* name,
+ const armnn::LayerBindingId id) override;
+
+ SerializerStrategy() : m_layerId(0) {}
+ ~SerializerStrategy() {}
flatbuffers::FlatBufferBuilder& GetFlatBufferBuilder()
{
@@ -46,309 +53,297 @@ public:
flatbuffers::Offset<armnnSerializer::FeatureCompatibilityVersions> GetVersionTable();
+private:
+ /// Creates the Input Slots and Output Slots and LayerBase for the layer.
+ flatbuffers::Offset<armnnSerializer::LayerBase> CreateLayerBase(
+ const armnn::IConnectableLayer* layer,
+ const armnnSerializer::LayerType layerType);
- ARMNN_DEPRECATED_MSG("Use VisitElementwiseUnaryLayer instead")
- void VisitAbsLayer(const armnn::IConnectableLayer* layer,
- const char* name = nullptr) override;
-
- void VisitActivationLayer(const armnn::IConnectableLayer* layer,
- const armnn::ActivationDescriptor& descriptor,
- const char* name = nullptr) override;
-
- void VisitAdditionLayer(const armnn::IConnectableLayer* layer,
- const char* name = nullptr) override;
-
- void VisitArgMinMaxLayer(const armnn::IConnectableLayer* layer,
- const armnn::ArgMinMaxDescriptor& argMinMaxDescriptor,
- const char* name = nullptr) override;
-
- void VisitBatchToSpaceNdLayer(const armnn::IConnectableLayer* layer,
- const armnn::BatchToSpaceNdDescriptor& descriptor,
- const char* name = nullptr) override;
-
- void VisitBatchNormalizationLayer(const armnn::IConnectableLayer* layer,
- const armnn::BatchNormalizationDescriptor& BatchNormalizationDescriptor,
- const armnn::ConstTensor& mean,
- const armnn::ConstTensor& variance,
- const armnn::ConstTensor& beta,
- const armnn::ConstTensor& gamma,
- const char* name = nullptr) override;
-
- void VisitComparisonLayer(const armnn::IConnectableLayer* layer,
- const armnn::ComparisonDescriptor& descriptor,
- const char* name = nullptr) override;
-
- void VisitConcatLayer(const armnn::IConnectableLayer* layer,
- const armnn::ConcatDescriptor& concatDescriptor,
- const char* name = nullptr) override;
-
- void VisitConstantLayer(const armnn::IConnectableLayer* layer,
- const armnn::ConstTensor& input,
- const char* = nullptr) override;
-
- void VisitConvolution2dLayer(const armnn::IConnectableLayer* layer,
- const armnn::Convolution2dDescriptor& descriptor,
- const armnn::ConstTensor& weights,
- const armnn::Optional<armnn::ConstTensor>& biases,
- const char* = nullptr) override;
-
- void VisitDepthToSpaceLayer(const armnn::IConnectableLayer* layer,
- const armnn::DepthToSpaceDescriptor& descriptor,
- const char* name = nullptr) override;
-
- void VisitDepthwiseConvolution2dLayer(const armnn::IConnectableLayer* layer,
- const armnn::DepthwiseConvolution2dDescriptor& descriptor,
- const armnn::ConstTensor& weights,
- const armnn::Optional<armnn::ConstTensor>& biases,
- const char* name = nullptr) override;
-
- void VisitDequantizeLayer(const armnn::IConnectableLayer* layer,
- const char* name = nullptr) override;
-
- void VisitDetectionPostProcessLayer(const armnn::IConnectableLayer* layer,
- const armnn::DetectionPostProcessDescriptor& descriptor,
- const armnn::ConstTensor& anchors,
- const char* name = nullptr) override;
-
- void VisitDivisionLayer(const armnn::IConnectableLayer* layer,
- const char* name = nullptr) override;
-
- void VisitElementwiseUnaryLayer(const armnn::IConnectableLayer* layer,
- const armnn::ElementwiseUnaryDescriptor& descriptor,
- const char* name = nullptr) override;
+ /// Creates the serializer AnyLayer for the layer and adds it to m_serializedLayers.
+ void CreateAnyLayer(const flatbuffers::Offset<void>& layer, const armnnSerializer::Layer serializerLayer);
- ARMNN_DEPRECATED_MSG("Use VisitComparisonLayer instead")
- void VisitEqualLayer(const armnn::IConnectableLayer* layer,
- const char* name = nullptr) override;
+ /// Creates the serializer ConstTensor for the armnn ConstTensor.
+ flatbuffers::Offset<armnnSerializer::ConstTensor> CreateConstTensorInfo(
+ const armnn::ConstTensor& constTensor);
- void VisitFillLayer(const armnn::IConnectableLayer* layer,
- const armnn::FillDescriptor& fillDescriptor,
- const char* name = nullptr) override;
+ /// Creates the serializer TensorInfo for the armnn TensorInfo.
+ flatbuffers::Offset<TensorInfo> CreateTensorInfo(const armnn::TensorInfo& tensorInfo);
- void VisitFloorLayer(const armnn::IConnectableLayer *layer,
- const char *name = nullptr) override;
+ template <typename T>
+ flatbuffers::Offset<flatbuffers::Vector<T>> CreateDataVector(const void* memory, unsigned int size);
- void VisitFullyConnectedLayer(const armnn::IConnectableLayer* layer,
- const armnn::FullyConnectedDescriptor& fullyConnectedDescriptor,
- const armnn::ConstTensor& weights,
- const armnn::Optional<armnn::ConstTensor>& biases,
- const char* name = nullptr) override;
+ /// Function which maps Guid to an index
+ uint32_t GetSerializedId(armnn::LayerGuid guid);
- ARMNN_DEPRECATED_MSG("Use VisitGatherLayer with descriptor instead")
- void VisitGatherLayer(const armnn::IConnectableLayer* layer,
- const char* name = nullptr) override;
+ /// Creates the serializer InputSlots for the layer.
+ std::vector<flatbuffers::Offset<armnnSerializer::InputSlot>> CreateInputSlots(
+ const armnn::IConnectableLayer* layer);
- void VisitGatherLayer(const armnn::IConnectableLayer* layer,
- const armnn::GatherDescriptor& gatherDescriptor,
- const char* name = nullptr) override;
+ /// Creates the serializer OutputSlots for the layer.
+ std::vector<flatbuffers::Offset<armnnSerializer::OutputSlot>> CreateOutputSlots(
+ const armnn::IConnectableLayer* layer);
- ARMNN_DEPRECATED_MSG("Use VisitComparisonLayer instead")
- void VisitGreaterLayer(const armnn::IConnectableLayer* layer,
- const char* name = nullptr) override;
+ /// FlatBufferBuilder to create our layers' FlatBuffers.
+ flatbuffers::FlatBufferBuilder m_flatBufferBuilder;
- void VisitInputLayer(const armnn::IConnectableLayer* layer,
- armnn::LayerBindingId id,
- const char* name = nullptr) override;
+ /// AnyLayers required by the SerializedGraph.
+ std::vector<flatbuffers::Offset<armnnSerializer::AnyLayer>> m_serializedLayers;
- void VisitInstanceNormalizationLayer(const armnn::IConnectableLayer* layer,
- const armnn::InstanceNormalizationDescriptor& instanceNormalizationDescriptor,
- const char* name = nullptr) override;
+ /// Vector of the binding ids of all Input Layers required by the SerializedGraph.
+ std::vector<int> m_inputIds;
- void VisitL2NormalizationLayer(const armnn::IConnectableLayer* layer,
- const armnn::L2NormalizationDescriptor& l2NormalizationDescriptor,
- const char* name = nullptr) override;
+ /// Vector of the binding ids of all Output Layers required by the SerializedGraph.
+ std::vector<int> m_outputIds;
- void VisitLogicalBinaryLayer(const armnn::IConnectableLayer* layer,
- const armnn::LogicalBinaryDescriptor& descriptor,
- const char* name = nullptr) override;
+ /// Mapped Guids of all Layers to match our index.
+ std::unordered_map<armnn::LayerGuid, uint32_t > m_guidMap;
- void VisitLogSoftmaxLayer(const armnn::IConnectableLayer* layer,
- const armnn::LogSoftmaxDescriptor& logSoftmaxDescriptor,
- const char* name = nullptr) override;
+ /// layer within our FlatBuffer index.
+ uint32_t m_layerId;
+
+private:
+ ARMNN_DEPRECATED_MSG("Use SerializeElementwiseUnaryLayer instead")
+ void SerializeAbsLayer(const armnn::IConnectableLayer* layer,
+ const char* name = nullptr);
- void VisitLstmLayer(const armnn::IConnectableLayer* layer,
- const armnn::LstmDescriptor& descriptor,
- const armnn::LstmInputParams& params,
- const char* name = nullptr) override;
+ void SerializeActivationLayer(const armnn::IConnectableLayer* layer,
+ const armnn::ActivationDescriptor& descriptor,
+ const char* name = nullptr);
- void VisitMeanLayer(const armnn::IConnectableLayer* layer,
- const armnn::MeanDescriptor& descriptor,
- const char* name) override;
+ void SerializeAdditionLayer(const armnn::IConnectableLayer* layer,
+ const char* name = nullptr);
- void VisitMinimumLayer(const armnn::IConnectableLayer* layer,
- const char* name = nullptr) override;
+ void SerializeArgMinMaxLayer(const armnn::IConnectableLayer* layer,
+ const armnn::ArgMinMaxDescriptor& argMinMaxDescriptor,
+ const char* name = nullptr);
- void VisitMaximumLayer(const armnn::IConnectableLayer* layer,
- const char* name = nullptr) override;
+ void SerializeBatchToSpaceNdLayer(const armnn::IConnectableLayer* layer,
+ const armnn::BatchToSpaceNdDescriptor& descriptor,
+ const char* name = nullptr);
- void VisitMergeLayer(const armnn::IConnectableLayer* layer,
- const char* name = nullptr) override;
+ void SerializeBatchNormalizationLayer(const armnn::IConnectableLayer* layer,
+ const armnn::BatchNormalizationDescriptor& BatchNormalizationDescriptor,
+ const std::vector<armnn::ConstTensor>& constants,
+ const char* name = nullptr);
- ARMNN_DEPRECATED_MSG("Use VisitConcatLayer instead")
- void VisitMergerLayer(const armnn::IConnectableLayer* layer,
- const armnn::MergerDescriptor& mergerDescriptor,
- const char* name = nullptr) override;
+ void SerializeComparisonLayer(const armnn::IConnectableLayer* layer,
+ const armnn::ComparisonDescriptor& descriptor,
+ const char* name = nullptr);
- void VisitMultiplicationLayer(const armnn::IConnectableLayer* layer,
- const char* name = nullptr) override;
+ void SerializeConcatLayer(const armnn::IConnectableLayer* layer,
+ const armnn::ConcatDescriptor& concatDescriptor,
+ const char* name = nullptr);
- void VisitOutputLayer(const armnn::IConnectableLayer* layer,
- armnn::LayerBindingId id,
- const char* name = nullptr) override;
+ void SerializeConstantLayer(const armnn::IConnectableLayer* layer,
+ const std::vector<armnn::ConstTensor>& constants,
+ const char* name = nullptr);
- void VisitPadLayer(const armnn::IConnectableLayer* layer,
- const armnn::PadDescriptor& PadDescriptor,
- const char* name = nullptr) override;
+ void SerializeConvolution2dLayer(const armnn::IConnectableLayer* layer,
+ const armnn::Convolution2dDescriptor& descriptor,
+ const std::vector<armnn::ConstTensor>& constants,
+ const char* name = nullptr);
- void VisitPermuteLayer(const armnn::IConnectableLayer* layer,
- const armnn::PermuteDescriptor& PermuteDescriptor,
- const char* name = nullptr) override;
+ void SerializeDepthToSpaceLayer(const armnn::IConnectableLayer* layer,
+ const armnn::DepthToSpaceDescriptor& descriptor,
+ const char* name = nullptr);
- void VisitPooling2dLayer(const armnn::IConnectableLayer* layer,
- const armnn::Pooling2dDescriptor& pooling2dDescriptor,
- const char* name = nullptr) override;
+ void SerializeDepthwiseConvolution2dLayer(const armnn::IConnectableLayer* layer,
+ const armnn::DepthwiseConvolution2dDescriptor& descriptor,
+ const std::vector<armnn::ConstTensor>& constants,
+ const char* name = nullptr);
- void VisitPreluLayer(const armnn::IConnectableLayer* layer,
- const char* name = nullptr) override;
+ void SerializeDequantizeLayer(const armnn::IConnectableLayer* layer,
+ const char* name = nullptr);
- void VisitQuantizeLayer(const armnn::IConnectableLayer* layer,
- const char* name = nullptr) override;
+ void SerializeDetectionPostProcessLayer(const armnn::IConnectableLayer* layer,
+ const armnn::DetectionPostProcessDescriptor& descriptor,
+ const std::vector<armnn::ConstTensor>& constants,
+ const char* name = nullptr);
- void VisitQLstmLayer(const armnn::IConnectableLayer* layer,
- const armnn::QLstmDescriptor& descriptor,
- const armnn::LstmInputParams& params,
- const char* name = nullptr) override;
+ void SerializeDivisionLayer(const armnn::IConnectableLayer* layer,
+ const char* name = nullptr);
- void VisitQuantizedLstmLayer(const armnn::IConnectableLayer* layer,
- const armnn::QuantizedLstmInputParams& params,
- const char* name = nullptr) override;
+ void SerializeElementwiseUnaryLayer(const armnn::IConnectableLayer* layer,
+ const armnn::ElementwiseUnaryDescriptor& descriptor,
+ const char* name = nullptr);
- void VisitRankLayer(const armnn::IConnectableLayer* layer,
- const char* name = nullptr) override;
+ ARMNN_DEPRECATED_MSG("Use SerializeComparisonLayer instead")
+ void SerializeEqualLayer(const armnn::IConnectableLayer* layer, const char* name);
- void VisitReduceLayer(const armnn::IConnectableLayer* layer,
- const armnn::ReduceDescriptor& reduceDescriptor,
- const char* name = nullptr) override;
+ void SerializeFillLayer(const armnn::IConnectableLayer* layer,
+ const armnn::FillDescriptor& fillDescriptor,
+ const char* name = nullptr);
- void VisitReshapeLayer(const armnn::IConnectableLayer* layer,
- const armnn::ReshapeDescriptor& reshapeDescriptor,
- const char* name = nullptr) override;
+ void SerializeFloorLayer(const armnn::IConnectableLayer *layer,
+ const char *name = nullptr);
- void VisitResizeLayer(const armnn::IConnectableLayer* layer,
- const armnn::ResizeDescriptor& resizeDescriptor,
- const char* name = nullptr) override;
+ void SerializeFullyConnectedLayer(const armnn::IConnectableLayer* layer,
+ const armnn::FullyConnectedDescriptor& fullyConnectedDescriptor,
+ const std::vector<armnn::ConstTensor>& constants,
+ const char* name = nullptr);
- ARMNN_DEPRECATED_MSG("Use VisitResizeLayer instead")
- void VisitResizeBilinearLayer(const armnn::IConnectableLayer* layer,
- const armnn::ResizeBilinearDescriptor& resizeDescriptor,
- const char* name = nullptr) override;
+ void SerializeGatherLayer(const armnn::IConnectableLayer* layer,
+ const armnn::GatherDescriptor& gatherDescriptor,
+ const char* name = nullptr);
- ARMNN_DEPRECATED_MSG("Use VisitElementwiseUnaryLayer instead")
- void VisitRsqrtLayer(const armnn::IConnectableLayer* layer,
- const char* name = nullptr) override;
+ ARMNN_DEPRECATED_MSG("Use SerializeComparisonLayer instead")
+ void SerializeGreaterLayer(const armnn::IConnectableLayer* layer, const char* name = nullptr);
+
+ void SerializeInputLayer(const armnn::IConnectableLayer* layer,
+ armnn::LayerBindingId id,
+ const char* name = nullptr);
- void VisitSliceLayer(const armnn::IConnectableLayer* layer,
- const armnn::SliceDescriptor& sliceDescriptor,
- const char* name = nullptr) override;
+ void SerializeInstanceNormalizationLayer(const armnn::IConnectableLayer* layer,
+ const armnn::InstanceNormalizationDescriptor& instanceNormalizationDescriptor,
+ const char* name = nullptr);
- void VisitSoftmaxLayer(const armnn::IConnectableLayer* layer,
- const armnn::SoftmaxDescriptor& softmaxDescriptor,
- const char* name = nullptr) override;
+ void SerializeL2NormalizationLayer(const armnn::IConnectableLayer* layer,
+ const armnn::L2NormalizationDescriptor& l2NormalizationDescriptor,
+ const char* name = nullptr);
- void VisitSpaceToBatchNdLayer(const armnn::IConnectableLayer* layer,
- const armnn::SpaceToBatchNdDescriptor& spaceToBatchNdDescriptor,
- const char* name = nullptr) override;
+ void SerializeLogicalBinaryLayer(const armnn::IConnectableLayer* layer,
+ const armnn::LogicalBinaryDescriptor& descriptor,
+ const char* name = nullptr);
- void VisitSpaceToDepthLayer(const armnn::IConnectableLayer* layer,
- const armnn::SpaceToDepthDescriptor& spaceToDepthDescriptor,
- const char* name = nullptr) override;
+ void SerializeLogSoftmaxLayer(const armnn::IConnectableLayer* layer,
+ const armnn::LogSoftmaxDescriptor& logSoftmaxDescriptor,
+ const char* name = nullptr);
- void VisitNormalizationLayer(const armnn::IConnectableLayer* layer,
- const armnn::NormalizationDescriptor& normalizationDescriptor,
- const char* name = nullptr) override;
+ void SerializeLstmLayer(const armnn::IConnectableLayer* layer,
+ const armnn::LstmDescriptor& descriptor,
+ const std::vector<armnn::ConstTensor>& constants,
+ const char* name = nullptr);
- void VisitSplitterLayer(const armnn::IConnectableLayer* layer,
- const armnn::ViewsDescriptor& viewsDescriptor,
- const char* name = nullptr) override;
+ void SerializeMeanLayer(const armnn::IConnectableLayer* layer,
+ const armnn::MeanDescriptor& descriptor,
+ const char* name);
- void VisitStandInLayer(const armnn::IConnectableLayer* layer,
- const armnn::StandInDescriptor& standInDescriptor,
- const char* name = nullptr) override;
+ void SerializeMinimumLayer(const armnn::IConnectableLayer* layer,
+ const char* name = nullptr);
- void VisitStackLayer(const armnn::IConnectableLayer* layer,
- const armnn::StackDescriptor& stackDescriptor,
- const char* name = nullptr) override;
+ void SerializeMaximumLayer(const armnn::IConnectableLayer* layer,
+ const char* name = nullptr);
- void VisitStridedSliceLayer(const armnn::IConnectableLayer* layer,
- const armnn::StridedSliceDescriptor& stridedSliceDescriptor,
- const char* name = nullptr) override;
+ void SerializeMergeLayer(const armnn::IConnectableLayer* layer,
+ const char* name = nullptr);
- void VisitSubtractionLayer(const armnn::IConnectableLayer* layer,
- const char* name = nullptr) override;
+ ARMNN_DEPRECATED_MSG("Use SerializeConcatLayer instead")
+ void SerializeMergerLayer(const armnn::IConnectableLayer* layer,
+ const armnn::MergerDescriptor& mergerDescriptor,
+ const char* name = nullptr);
- void VisitSwitchLayer(const armnn::IConnectableLayer* layer,
- const char* name = nullptr) override;
+ void SerializeMultiplicationLayer(const armnn::IConnectableLayer* layer,
+ const char* name = nullptr);
- void VisitTransposeConvolution2dLayer(const armnn::IConnectableLayer* layer,
- const armnn::TransposeConvolution2dDescriptor& descriptor,
- const armnn::ConstTensor& weights,
- const armnn::Optional<armnn::ConstTensor>& biases,
- const char* = nullptr) override;
+ void SerializeOutputLayer(const armnn::IConnectableLayer* layer,
+ armnn::LayerBindingId id,
+ const char* name = nullptr);
- void VisitTransposeLayer(const armnn::IConnectableLayer* layer,
- const armnn::TransposeDescriptor& descriptor,
- const char* name = nullptr) override;
+ void SerializePadLayer(const armnn::IConnectableLayer* layer,
+ const armnn::PadDescriptor& PadDescriptor,
+ const char* name = nullptr);
-private:
+ void SerializePermuteLayer(const armnn::IConnectableLayer* layer,
+ const armnn::PermuteDescriptor& PermuteDescriptor,
+ const char* name = nullptr);
- /// Creates the Input Slots and Output Slots and LayerBase for the layer.
- flatbuffers::Offset<armnnSerializer::LayerBase> CreateLayerBase(
- const armnn::IConnectableLayer* layer,
- const armnnSerializer::LayerType layerType);
+ void SerializePooling2dLayer(const armnn::IConnectableLayer* layer,
+ const armnn::Pooling2dDescriptor& pooling2dDescriptor,
+ const char* name = nullptr);
- /// Creates the serializer AnyLayer for the layer and adds it to m_serializedLayers.
- void CreateAnyLayer(const flatbuffers::Offset<void>& layer, const armnnSerializer::Layer serializerLayer);
+ void SerializePreluLayer(const armnn::IConnectableLayer* layer,
+ const char* name = nullptr);
- /// Creates the serializer ConstTensor for the armnn ConstTensor.
- flatbuffers::Offset<armnnSerializer::ConstTensor> CreateConstTensorInfo(
- const armnn::ConstTensor& constTensor);
+ void SerializeQuantizeLayer(const armnn::IConnectableLayer* layer,
+ const char* name = nullptr);
- /// Creates the serializer TensorInfo for the armnn TensorInfo.
- flatbuffers::Offset<TensorInfo> CreateTensorInfo(const armnn::TensorInfo& tensorInfo);
+ void SerializeQLstmLayer(const armnn::IConnectableLayer* layer,
+ const armnn::QLstmDescriptor& descriptor,
+ const std::vector<armnn::ConstTensor>& constants,
+ const char* name = nullptr);
- template <typename T>
- flatbuffers::Offset<flatbuffers::Vector<T>> CreateDataVector(const void* memory, unsigned int size);
+ void SerializeQuantizedLstmLayer(const armnn::IConnectableLayer* layer,
+ const std::vector<armnn::ConstTensor>& constants,
+ const char* name = nullptr);
- ///Function which maps Guid to an index
- uint32_t GetSerializedId(armnn::LayerGuid guid);
+ void SerializeRankLayer(const armnn::IConnectableLayer* layer,
+ const char* name = nullptr);
- /// Creates the serializer InputSlots for the layer.
- std::vector<flatbuffers::Offset<armnnSerializer::InputSlot>> CreateInputSlots(
- const armnn::IConnectableLayer* layer);
+ void SerializeReduceLayer(const armnn::IConnectableLayer* layer,
+ const armnn::ReduceDescriptor& reduceDescriptor,
+ const char* name = nullptr);
- /// Creates the serializer OutputSlots for the layer.
- std::vector<flatbuffers::Offset<armnnSerializer::OutputSlot>> CreateOutputSlots(
- const armnn::IConnectableLayer* layer);
+ void SerializeReshapeLayer(const armnn::IConnectableLayer* layer,
+ const armnn::ReshapeDescriptor& reshapeDescriptor,
+ const char* name = nullptr);
- /// FlatBufferBuilder to create our layers' FlatBuffers.
- flatbuffers::FlatBufferBuilder m_flatBufferBuilder;
+ void SerializeResizeLayer(const armnn::IConnectableLayer* layer,
+ const armnn::ResizeDescriptor& resizeDescriptor,
+ const char* name = nullptr);
- /// AnyLayers required by the SerializedGraph.
- std::vector<flatbuffers::Offset<armnnSerializer::AnyLayer>> m_serializedLayers;
+ ARMNN_DEPRECATED_MSG("Use SerializeResizeLayer instead")
+ void SerializeResizeBilinearLayer(const armnn::IConnectableLayer* layer,
+ const armnn::ResizeBilinearDescriptor& resizeDescriptor,
+ const char* name = nullptr);
- /// Vector of the binding ids of all Input Layers required by the SerializedGraph.
- std::vector<int> m_inputIds;
+ ARMNN_DEPRECATED_MSG("Use SerializeElementwiseUnaryLayer instead")
+ void SerializeRsqrtLayer(const armnn::IConnectableLayer* layer,
+ const char* name = nullptr);
- /// Vector of the binding ids of all Output Layers required by the SerializedGraph.
- std::vector<int> m_outputIds;
+ void SerializeSliceLayer(const armnn::IConnectableLayer* layer,
+ const armnn::SliceDescriptor& sliceDescriptor,
+ const char* name = nullptr);
- /// Mapped Guids of all Layers to match our index.
- std::unordered_map<armnn::LayerGuid, uint32_t > m_guidMap;
+ void SerializeSoftmaxLayer(const armnn::IConnectableLayer* layer,
+ const armnn::SoftmaxDescriptor& softmaxDescriptor,
+ const char* name = nullptr);
- /// layer within our FlatBuffer index.
- uint32_t m_layerId;
+ void SerializeSpaceToBatchNdLayer(const armnn::IConnectableLayer* layer,
+ const armnn::SpaceToBatchNdDescriptor& spaceToBatchNdDescriptor,
+ const char* name = nullptr);
+
+ void SerializeSpaceToDepthLayer(const armnn::IConnectableLayer* layer,
+ const armnn::SpaceToDepthDescriptor& spaceToDepthDescriptor,
+ const char* name = nullptr);
+
+ void SerializeNormalizationLayer(const armnn::IConnectableLayer* layer,
+ const armnn::NormalizationDescriptor& normalizationDescriptor,
+ const char* name = nullptr);
+
+ void SerializeSplitterLayer(const armnn::IConnectableLayer* layer,
+ const armnn::ViewsDescriptor& viewsDescriptor,
+ const char* name = nullptr);
+
+ void SerializeStandInLayer(const armnn::IConnectableLayer* layer,
+ const armnn::StandInDescriptor& standInDescriptor,
+ const char* name = nullptr);
+
+ void SerializeStackLayer(const armnn::IConnectableLayer* layer,
+ const armnn::StackDescriptor& stackDescriptor,
+ const char* name = nullptr);
+
+ void SerializeStridedSliceLayer(const armnn::IConnectableLayer* layer,
+ const armnn::StridedSliceDescriptor& stridedSliceDescriptor,
+ const char* name = nullptr);
+
+ void SerializeSubtractionLayer(const armnn::IConnectableLayer* layer,
+ const char* name = nullptr);
+
+ void SerializeSwitchLayer(const armnn::IConnectableLayer* layer,
+ const char* name = nullptr);
+
+ void SerializeTransposeConvolution2dLayer(const armnn::IConnectableLayer* layer,
+ const armnn::TransposeConvolution2dDescriptor& descriptor,
+ const std::vector<armnn::ConstTensor>& constants,
+ const char* = nullptr);
+
+ void SerializeTransposeLayer(const armnn::IConnectableLayer* layer,
+ const armnn::TransposeDescriptor& descriptor,
+ const char* name = nullptr);
};
+
+
class ISerializer::SerializerImpl
{
public:
@@ -367,7 +362,7 @@ public:
private:
/// Visitor to contruct serialized network
- SerializerVisitor m_SerializerVisitor;
+ SerializerStrategy m_SerializerStrategy;
};
} //namespace armnnSerializer
diff --git a/src/armnnSerializer/test/ActivationSerializationTests.cpp b/src/armnnSerializer/test/ActivationSerializationTests.cpp
index 1645731413..fbe1ae0ad4 100644
--- a/src/armnnSerializer/test/ActivationSerializationTests.cpp
+++ b/src/armnnSerializer/test/ActivationSerializationTests.cpp
@@ -17,15 +17,20 @@
BOOST_AUTO_TEST_SUITE(SerializerTests)
-class VerifyActivationName : public armnn::LayerVisitorBase<armnn::VisitorNoThrowPolicy>
+class VerifyActivationName : public armnn::IStrategy
{
public:
- void VisitActivationLayer(const armnn::IConnectableLayer* layer,
- const armnn::ActivationDescriptor& activationDescriptor,
- const char* name) override
+ void ExecuteStrategy(const armnn::IConnectableLayer* layer,
+ const armnn::BaseDescriptor& descriptor,
+ const std::vector<armnn::ConstTensor>& constants,
+ const char* name,
+ const armnn::LayerBindingId id = 0) override
{
- IgnoreUnused(layer, activationDescriptor);
- BOOST_TEST(name == "activation");
+ IgnoreUnused(layer, descriptor, constants, id);
+ if (layer->GetType() == armnn::LayerType::Activation)
+ {
+ BOOST_TEST(name == "activation");
+ }
}
};
@@ -67,7 +72,7 @@ BOOST_AUTO_TEST_CASE(ActivationSerialization)
armnn::INetworkPtr deserializedNetwork = parser->CreateNetworkFromBinary(serializerVector);
VerifyActivationName visitor;
- deserializedNetwork->Accept(visitor);
+ deserializedNetwork->ExecuteStrategy(visitor);
armnn::IRuntime::CreationOptions options; // default options
armnn::IRuntimePtr run = armnn::IRuntime::Create(options);
diff --git a/src/armnnSerializer/test/ComparisonSerializationTests.cpp b/src/armnnSerializer/test/ComparisonSerializationTests.cpp
new file mode 100644
index 0000000000..3aee9a7bcb
--- /dev/null
+++ b/src/armnnSerializer/test/ComparisonSerializationTests.cpp
@@ -0,0 +1,123 @@
+//
+// Copyright © 2021 Arm Ltd and Contributors. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+
+#include "../Serializer.hpp"
+#include "SerializerTestUtils.hpp"
+
+#include <armnn/Descriptors.hpp>
+#include <armnn/INetwork.hpp>
+#include <armnn/IRuntime.hpp>
+#include <armnnDeserializer/IDeserializer.hpp>
+#include <armnn/utility/IgnoreUnused.hpp>
+
+#include <boost/test/unit_test.hpp>
+
+
+BOOST_AUTO_TEST_SUITE(SerializerTests)
+
+struct ComparisonModel
+{
+ ComparisonModel(const std::string& layerName,
+ const armnn::TensorInfo& inputInfo,
+ const armnn::TensorInfo& outputInfo,
+ armnn::ComparisonDescriptor& descriptor)
+ : m_network(armnn::INetwork::Create())
+ {
+ armnn::IConnectableLayer* const inputLayer0 = m_network->AddInputLayer(0);
+ armnn::IConnectableLayer* const inputLayer1 = m_network->AddInputLayer(1);
+ armnn::IConnectableLayer* const equalLayer = m_network->AddComparisonLayer(descriptor, layerName.c_str());
+ armnn::IConnectableLayer* const outputLayer = m_network->AddOutputLayer(0);
+
+ inputLayer0->GetOutputSlot(0).Connect(equalLayer->GetInputSlot(0));
+ inputLayer1->GetOutputSlot(0).Connect(equalLayer->GetInputSlot(1));
+ equalLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
+
+ inputLayer0->GetOutputSlot(0).SetTensorInfo(inputInfo);
+ inputLayer1->GetOutputSlot(0).SetTensorInfo(inputInfo);
+ equalLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
+ }
+
+ armnn::INetworkPtr m_network;
+};
+
+class ComparisonLayerVerifier : public LayerVerifierBase
+{
+public:
+ ComparisonLayerVerifier(const std::string& layerName,
+ const std::vector<armnn::TensorInfo>& inputInfos,
+ const std::vector<armnn::TensorInfo>& outputInfos,
+ const armnn::ComparisonDescriptor& descriptor)
+ : LayerVerifierBase(layerName, inputInfos, outputInfos)
+ , m_Descriptor (descriptor) {}
+
+ void ExecuteStrategy(const armnn::IConnectableLayer* layer,
+ const armnn::BaseDescriptor& descriptor,
+ const std::vector<armnn::ConstTensor>& constants,
+ const char* name,
+ const armnn::LayerBindingId id = 0) override
+ {
+ armnn::IgnoreUnused(descriptor, constants, id);
+ switch (layer->GetType())
+ {
+ case armnn::LayerType::Input: break;
+ case armnn::LayerType::Output: break;
+ case armnn::LayerType::Comparison:
+ {
+ VerifyNameAndConnections(layer, name);
+ const armnn::ComparisonDescriptor& layerDescriptor =
+ static_cast<const armnn::ComparisonDescriptor&>(descriptor);
+ BOOST_CHECK(layerDescriptor.m_Operation == m_Descriptor.m_Operation);
+ break;
+ }
+ default:
+ {
+ throw armnn::Exception("Unexpected layer type in Comparison test model");
+ }
+ }
+ }
+
+private:
+ armnn::ComparisonDescriptor m_Descriptor;
+};
+
+BOOST_AUTO_TEST_CASE(SerializeEqual)
+{
+ const std::string layerName("equal");
+
+ const armnn::TensorShape shape{2, 1, 2, 4};
+ const armnn::TensorInfo inputInfo = armnn::TensorInfo(shape, armnn::DataType::Float32);
+ const armnn::TensorInfo outputInfo = armnn::TensorInfo(shape, armnn::DataType::Boolean);
+
+ armnn::ComparisonDescriptor descriptor (armnn::ComparisonOperation::Equal);
+
+ ComparisonModel model(layerName, inputInfo, outputInfo, descriptor);
+
+ armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*model.m_network));
+ BOOST_CHECK(deserializedNetwork);
+
+ ComparisonLayerVerifier verifier(layerName, { inputInfo, inputInfo }, { outputInfo }, descriptor);
+ deserializedNetwork->ExecuteStrategy(verifier);
+}
+
+BOOST_AUTO_TEST_CASE(SerializeGreater)
+{
+ const std::string layerName("greater");
+
+ const armnn::TensorShape shape{2, 1, 2, 4};
+ const armnn::TensorInfo inputInfo = armnn::TensorInfo(shape, armnn::DataType::Float32);
+ const armnn::TensorInfo outputInfo = armnn::TensorInfo(shape, armnn::DataType::Boolean);
+
+ armnn::ComparisonDescriptor descriptor (armnn::ComparisonOperation::Greater);
+
+ ComparisonModel model(layerName, inputInfo, outputInfo, descriptor);
+
+ armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*model.m_network));
+ BOOST_CHECK(deserializedNetwork);
+
+ ComparisonLayerVerifier verifier(layerName, { inputInfo, inputInfo }, { outputInfo }, descriptor);
+ deserializedNetwork->ExecuteStrategy(verifier);
+}
+
+BOOST_AUTO_TEST_SUITE_END()
diff --git a/src/armnnSerializer/test/LstmSerializationTests.cpp b/src/armnnSerializer/test/LstmSerializationTests.cpp
new file mode 100644
index 0000000000..4705c0bd28
--- /dev/null
+++ b/src/armnnSerializer/test/LstmSerializationTests.cpp
@@ -0,0 +1,2199 @@
+//
+// Copyright © 2021 Arm Ltd and Contributors. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+
+#include "../Serializer.hpp"
+#include "SerializerTestUtils.hpp"
+
+#include <armnn/Descriptors.hpp>
+#include <armnn/INetwork.hpp>
+#include <armnn/IRuntime.hpp>
+#include <armnnDeserializer/IDeserializer.hpp>
+#include <armnn/utility/IgnoreUnused.hpp>
+#include <armnn/LstmParams.hpp>
+#include <armnn/QuantizedLstmParams.hpp>
+
+#include <boost/test/unit_test.hpp>
+
+#include <fmt/format.h>
+
+
+BOOST_AUTO_TEST_SUITE(SerializerTests)
+
+template<typename Descriptor>
+armnn::LstmInputParams ConstantVector2LstmInputParams(const std::vector<armnn::ConstTensor>& constants,
+ Descriptor& descriptor)
+{
+ armnn::LstmInputParams lstmInputParams;
+ size_t i = 0;
+
+ // Inserting basic paramters
+ lstmInputParams.m_InputToForgetWeights = &constants[i++];
+ lstmInputParams.m_InputToCellWeights = &constants[i++];
+ lstmInputParams.m_InputToOutputWeights = &constants[i++];
+ lstmInputParams.m_RecurrentToForgetWeights = &constants[i++];
+ lstmInputParams.m_RecurrentToCellWeights = &constants[i++];
+ lstmInputParams.m_RecurrentToOutputWeights = &constants[i++];
+ lstmInputParams.m_ForgetGateBias = &constants[i++];
+ lstmInputParams.m_CellBias = &constants[i++];
+ lstmInputParams.m_OutputGateBias = &constants[i++];
+ if (!descriptor.m_CifgEnabled)
+ {
+ lstmInputParams.m_InputToInputWeights = &constants[i++];
+ lstmInputParams.m_RecurrentToInputWeights = &constants[i++];
+ lstmInputParams.m_InputGateBias = &constants[i++];
+ }
+
+ if (descriptor.m_PeepholeEnabled)
+ {
+ if (!descriptor.m_CifgEnabled)
+ {
+ lstmInputParams.m_CellToInputWeights = &constants[i++];
+ }
+ lstmInputParams.m_CellToForgetWeights = &constants[i++];
+ lstmInputParams.m_CellToOutputWeights = &constants[i++];
+ }
+
+ if (descriptor.m_ProjectionEnabled)
+ {
+ lstmInputParams.m_ProjectionWeights = &constants[i++];
+ lstmInputParams.m_ProjectionBias = &constants[i++];
+ }
+
+ if (descriptor.m_LayerNormEnabled)
+ {
+ if (!descriptor.m_CifgEnabled)
+ {
+ lstmInputParams.m_InputLayerNormWeights = &constants[i++];
+ }
+ lstmInputParams.m_ForgetLayerNormWeights = &constants[i++];
+ lstmInputParams.m_CellLayerNormWeights = &constants[i++];
+ lstmInputParams.m_OutputLayerNormWeights = &constants[i++];
+ }
+
+ return lstmInputParams;
+}
+
+// Works for Lstm and QLstm (QuantizedLstm uses different parameters)
+template<typename Descriptor>
+class VerifyLstmLayer : public LayerVerifierBaseWithDescriptor<Descriptor>
+{
+public:
+ VerifyLstmLayer(const std::string& layerName,
+ const std::vector<armnn::TensorInfo>& inputInfos,
+ const std::vector<armnn::TensorInfo>& outputInfos,
+ const Descriptor& descriptor,
+ const armnn::LstmInputParams& inputParams)
+ : LayerVerifierBaseWithDescriptor<Descriptor>(layerName, inputInfos, outputInfos, descriptor)
+ , m_InputParams(inputParams) {}
+
+ void ExecuteStrategy(const armnn::IConnectableLayer* layer,
+ const armnn::BaseDescriptor& descriptor,
+ const std::vector<armnn::ConstTensor>& constants,
+ const char* name,
+ const armnn::LayerBindingId id = 0) override
+ {
+ armnn::IgnoreUnused(constants, id);
+ switch (layer->GetType())
+ {
+ case armnn::LayerType::Input: break;
+ case armnn::LayerType::Output: break;
+ case armnn::LayerType::Lstm:
+ {
+ this->VerifyNameAndConnections(layer, name);
+ const Descriptor& internalDescriptor = static_cast<const Descriptor&>(descriptor);
+ this->VerifyDescriptor(internalDescriptor);
+ armnn::LstmInputParams lstmParams = ConstantVector2LstmInputParams(constants, internalDescriptor);
+ VerifyInputParameters(lstmParams);
+ break;
+ }
+ case armnn::LayerType::QLstm:
+ {
+ this->VerifyNameAndConnections(layer, name);
+ const Descriptor& internalDescriptor = static_cast<const Descriptor&>(descriptor);
+ this->VerifyDescriptor(internalDescriptor);
+ armnn::LstmInputParams lstmParams = ConstantVector2LstmInputParams(constants, internalDescriptor);
+ VerifyInputParameters(lstmParams);
+ break;
+ }
+ default:
+ {
+ throw armnn::Exception("Unexpected layer type in Lstm test model");
+ }
+ }
+ }
+
+protected:
+ void VerifyInputParameters(const armnn::LstmInputParams& params)
+ {
+ this->VerifyConstTensors(
+ "m_InputToInputWeights", m_InputParams.m_InputToInputWeights, params.m_InputToInputWeights);
+ this->VerifyConstTensors(
+ "m_InputToForgetWeights", m_InputParams.m_InputToForgetWeights, params.m_InputToForgetWeights);
+ this->VerifyConstTensors(
+ "m_InputToCellWeights", m_InputParams.m_InputToCellWeights, params.m_InputToCellWeights);
+ this->VerifyConstTensors(
+ "m_InputToOutputWeights", m_InputParams.m_InputToOutputWeights, params.m_InputToOutputWeights);
+ this->VerifyConstTensors(
+ "m_RecurrentToInputWeights", m_InputParams.m_RecurrentToInputWeights, params.m_RecurrentToInputWeights);
+ this->VerifyConstTensors(
+ "m_RecurrentToForgetWeights", m_InputParams.m_RecurrentToForgetWeights, params.m_RecurrentToForgetWeights);
+ this->VerifyConstTensors(
+ "m_RecurrentToCellWeights", m_InputParams.m_RecurrentToCellWeights, params.m_RecurrentToCellWeights);
+ this->VerifyConstTensors(
+ "m_RecurrentToOutputWeights", m_InputParams.m_RecurrentToOutputWeights, params.m_RecurrentToOutputWeights);
+ this->VerifyConstTensors(
+ "m_CellToInputWeights", m_InputParams.m_CellToInputWeights, params.m_CellToInputWeights);
+ this->VerifyConstTensors(
+ "m_CellToForgetWeights", m_InputParams.m_CellToForgetWeights, params.m_CellToForgetWeights);
+ this->VerifyConstTensors(
+ "m_CellToOutputWeights", m_InputParams.m_CellToOutputWeights, params.m_CellToOutputWeights);
+ this->VerifyConstTensors(
+ "m_InputGateBias", m_InputParams.m_InputGateBias, params.m_InputGateBias);
+ this->VerifyConstTensors(
+ "m_ForgetGateBias", m_InputParams.m_ForgetGateBias, params.m_ForgetGateBias);
+ this->VerifyConstTensors(
+ "m_CellBias", m_InputParams.m_CellBias, params.m_CellBias);
+ this->VerifyConstTensors(
+ "m_OutputGateBias", m_InputParams.m_OutputGateBias, params.m_OutputGateBias);
+ this->VerifyConstTensors(
+ "m_ProjectionWeights", m_InputParams.m_ProjectionWeights, params.m_ProjectionWeights);
+ this->VerifyConstTensors(
+ "m_ProjectionBias", m_InputParams.m_ProjectionBias, params.m_ProjectionBias);
+ this->VerifyConstTensors(
+ "m_InputLayerNormWeights", m_InputParams.m_InputLayerNormWeights, params.m_InputLayerNormWeights);
+ this->VerifyConstTensors(
+ "m_ForgetLayerNormWeights", m_InputParams.m_ForgetLayerNormWeights, params.m_ForgetLayerNormWeights);
+ this->VerifyConstTensors(
+ "m_CellLayerNormWeights", m_InputParams.m_CellLayerNormWeights, params.m_CellLayerNormWeights);
+ this->VerifyConstTensors(
+ "m_OutputLayerNormWeights", m_InputParams.m_OutputLayerNormWeights, params.m_OutputLayerNormWeights);
+ }
+
+private:
+ armnn::LstmInputParams m_InputParams;
+};
+
+BOOST_AUTO_TEST_CASE(SerializeDeserializeLstmCifgPeepholeNoProjection)
+{
+ armnn::LstmDescriptor descriptor;
+ descriptor.m_ActivationFunc = 4;
+ descriptor.m_ClippingThresProj = 0.0f;
+ descriptor.m_ClippingThresCell = 0.0f;
+ descriptor.m_CifgEnabled = true; // if this is true then we DON'T need to set the OptCifgParams
+ descriptor.m_ProjectionEnabled = false;
+ descriptor.m_PeepholeEnabled = true;
+
+ const uint32_t batchSize = 1;
+ const uint32_t inputSize = 2;
+ const uint32_t numUnits = 4;
+ const uint32_t outputSize = numUnits;
+
+ armnn::TensorInfo inputWeightsInfo1({numUnits, inputSize}, armnn::DataType::Float32);
+ std::vector<float> inputToForgetWeightsData = GenerateRandomData<float>(inputWeightsInfo1.GetNumElements());
+ armnn::ConstTensor inputToForgetWeights(inputWeightsInfo1, inputToForgetWeightsData);
+
+ std::vector<float> inputToCellWeightsData = GenerateRandomData<float>(inputWeightsInfo1.GetNumElements());
+ armnn::ConstTensor inputToCellWeights(inputWeightsInfo1, inputToCellWeightsData);
+
+ std::vector<float> inputToOutputWeightsData = GenerateRandomData<float>(inputWeightsInfo1.GetNumElements());
+ armnn::ConstTensor inputToOutputWeights(inputWeightsInfo1, inputToOutputWeightsData);
+
+ armnn::TensorInfo inputWeightsInfo2({numUnits, outputSize}, armnn::DataType::Float32);
+ std::vector<float> recurrentToForgetWeightsData = GenerateRandomData<float>(inputWeightsInfo2.GetNumElements());
+ armnn::ConstTensor recurrentToForgetWeights(inputWeightsInfo2, recurrentToForgetWeightsData);
+
+ std::vector<float> recurrentToCellWeightsData = GenerateRandomData<float>(inputWeightsInfo2.GetNumElements());
+ armnn::ConstTensor recurrentToCellWeights(inputWeightsInfo2, recurrentToCellWeightsData);
+
+ std::vector<float> recurrentToOutputWeightsData = GenerateRandomData<float>(inputWeightsInfo2.GetNumElements());
+ armnn::ConstTensor recurrentToOutputWeights(inputWeightsInfo2, recurrentToOutputWeightsData);
+
+ armnn::TensorInfo inputWeightsInfo3({numUnits}, armnn::DataType::Float32);
+ std::vector<float> cellToForgetWeightsData = GenerateRandomData<float>(inputWeightsInfo3.GetNumElements());
+ armnn::ConstTensor cellToForgetWeights(inputWeightsInfo3, cellToForgetWeightsData);
+
+ std::vector<float> cellToOutputWeightsData = GenerateRandomData<float>(inputWeightsInfo3.GetNumElements());
+ armnn::ConstTensor cellToOutputWeights(inputWeightsInfo3, cellToOutputWeightsData);
+
+ std::vector<float> forgetGateBiasData(numUnits, 1.0f);
+ armnn::ConstTensor forgetGateBias(inputWeightsInfo3, forgetGateBiasData);
+
+ std::vector<float> cellBiasData(numUnits, 0.0f);
+ armnn::ConstTensor cellBias(inputWeightsInfo3, cellBiasData);
+
+ std::vector<float> outputGateBiasData(numUnits, 0.0f);
+ armnn::ConstTensor outputGateBias(inputWeightsInfo3, outputGateBiasData);
+
+ armnn::LstmInputParams params;
+ params.m_InputToForgetWeights = &inputToForgetWeights;
+ params.m_InputToCellWeights = &inputToCellWeights;
+ params.m_InputToOutputWeights = &inputToOutputWeights;
+ params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
+ params.m_RecurrentToCellWeights = &recurrentToCellWeights;
+ params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
+ params.m_ForgetGateBias = &forgetGateBias;
+ params.m_CellBias = &cellBias;
+ params.m_OutputGateBias = &outputGateBias;
+ params.m_CellToForgetWeights = &cellToForgetWeights;
+ params.m_CellToOutputWeights = &cellToOutputWeights;
+
+ armnn::INetworkPtr network = armnn::INetwork::Create();
+ armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
+ armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(1);
+ armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(2);
+ const std::string layerName("lstm");
+ armnn::IConnectableLayer* const lstmLayer = network->AddLstmLayer(descriptor, params, layerName.c_str());
+ armnn::IConnectableLayer* const scratchBuffer = network->AddOutputLayer(0);
+ armnn::IConnectableLayer* const outputStateOut = network->AddOutputLayer(1);
+ armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(2);
+ armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(3);
+
+ // connect up
+ armnn::TensorInfo inputTensorInfo({ batchSize, inputSize }, armnn::DataType::Float32);
+ armnn::TensorInfo cellStateTensorInfo({ batchSize, numUnits}, armnn::DataType::Float32);
+ armnn::TensorInfo outputStateTensorInfo({ batchSize, outputSize }, armnn::DataType::Float32);
+ armnn::TensorInfo lstmTensorInfoScratchBuff({ batchSize, numUnits * 3 }, armnn::DataType::Float32);
+
+ inputLayer->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(0));
+ inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
+
+ outputStateIn->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(1));
+ outputStateIn->GetOutputSlot(0).SetTensorInfo(outputStateTensorInfo);
+
+ cellStateIn->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(2));
+ cellStateIn->GetOutputSlot(0).SetTensorInfo(cellStateTensorInfo);
+
+ lstmLayer->GetOutputSlot(0).Connect(scratchBuffer->GetInputSlot(0));
+ lstmLayer->GetOutputSlot(0).SetTensorInfo(lstmTensorInfoScratchBuff);
+
+ lstmLayer->GetOutputSlot(1).Connect(outputStateOut->GetInputSlot(0));
+ lstmLayer->GetOutputSlot(1).SetTensorInfo(outputStateTensorInfo);
+
+ lstmLayer->GetOutputSlot(2).Connect(cellStateOut->GetInputSlot(0));
+ lstmLayer->GetOutputSlot(2).SetTensorInfo(cellStateTensorInfo);
+
+ lstmLayer->GetOutputSlot(3).Connect(outputLayer->GetInputSlot(0));
+ lstmLayer->GetOutputSlot(3).SetTensorInfo(outputStateTensorInfo);
+
+ armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
+ BOOST_CHECK(deserializedNetwork);
+
+ VerifyLstmLayer<armnn::LstmDescriptor> checker(
+ layerName,
+ {inputTensorInfo, outputStateTensorInfo, cellStateTensorInfo},
+ {lstmTensorInfoScratchBuff, outputStateTensorInfo, cellStateTensorInfo, outputStateTensorInfo},
+ descriptor,
+ params);
+ deserializedNetwork->ExecuteStrategy(checker);
+}
+
+BOOST_AUTO_TEST_CASE(SerializeDeserializeLstmNoCifgWithPeepholeAndProjection)
+{
+ armnn::LstmDescriptor descriptor;
+ descriptor.m_ActivationFunc = 4;
+ descriptor.m_ClippingThresProj = 0.0f;
+ descriptor.m_ClippingThresCell = 0.0f;
+ descriptor.m_CifgEnabled = false; // if this is true then we DON'T need to set the OptCifgParams
+ descriptor.m_ProjectionEnabled = true;
+ descriptor.m_PeepholeEnabled = true;
+
+ const uint32_t batchSize = 2;
+ const uint32_t inputSize = 5;
+ const uint32_t numUnits = 20;
+ const uint32_t outputSize = 16;
+
+ armnn::TensorInfo tensorInfo20x5({numUnits, inputSize}, armnn::DataType::Float32);
+ std::vector<float> inputToInputWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
+ armnn::ConstTensor inputToInputWeights(tensorInfo20x5, inputToInputWeightsData);
+
+ std::vector<float> inputToForgetWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
+ armnn::ConstTensor inputToForgetWeights(tensorInfo20x5, inputToForgetWeightsData);
+
+ std::vector<float> inputToCellWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
+ armnn::ConstTensor inputToCellWeights(tensorInfo20x5, inputToCellWeightsData);
+
+ std::vector<float> inputToOutputWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
+ armnn::ConstTensor inputToOutputWeights(tensorInfo20x5, inputToOutputWeightsData);
+
+ armnn::TensorInfo tensorInfo20({numUnits}, armnn::DataType::Float32);
+ std::vector<float> inputGateBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
+ armnn::ConstTensor inputGateBias(tensorInfo20, inputGateBiasData);
+
+ std::vector<float> forgetGateBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
+ armnn::ConstTensor forgetGateBias(tensorInfo20, forgetGateBiasData);
+
+ std::vector<float> cellBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
+ armnn::ConstTensor cellBias(tensorInfo20, cellBiasData);
+
+ std::vector<float> outputGateBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
+ armnn::ConstTensor outputGateBias(tensorInfo20, outputGateBiasData);
+
+ armnn::TensorInfo tensorInfo20x16({numUnits, outputSize}, armnn::DataType::Float32);
+ std::vector<float> recurrentToInputWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
+ armnn::ConstTensor recurrentToInputWeights(tensorInfo20x16, recurrentToInputWeightsData);
+
+ std::vector<float> recurrentToForgetWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
+ armnn::ConstTensor recurrentToForgetWeights(tensorInfo20x16, recurrentToForgetWeightsData);
+
+ std::vector<float> recurrentToCellWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
+ armnn::ConstTensor recurrentToCellWeights(tensorInfo20x16, recurrentToCellWeightsData);
+
+ std::vector<float> recurrentToOutputWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
+ armnn::ConstTensor recurrentToOutputWeights(tensorInfo20x16, recurrentToOutputWeightsData);
+
+ std::vector<float> cellToInputWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
+ armnn::ConstTensor cellToInputWeights(tensorInfo20, cellToInputWeightsData);
+
+ std::vector<float> cellToForgetWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
+ armnn::ConstTensor cellToForgetWeights(tensorInfo20, cellToForgetWeightsData);
+
+ std::vector<float> cellToOutputWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
+ armnn::ConstTensor cellToOutputWeights(tensorInfo20, cellToOutputWeightsData);
+
+ armnn::TensorInfo tensorInfo16x20({outputSize, numUnits}, armnn::DataType::Float32);
+ std::vector<float> projectionWeightsData = GenerateRandomData<float>(tensorInfo16x20.GetNumElements());
+ armnn::ConstTensor projectionWeights(tensorInfo16x20, projectionWeightsData);
+
+ armnn::TensorInfo tensorInfo16({outputSize}, armnn::DataType::Float32);
+ std::vector<float> projectionBiasData(outputSize, 0.f);
+ armnn::ConstTensor projectionBias(tensorInfo16, projectionBiasData);
+
+ armnn::LstmInputParams params;
+ params.m_InputToForgetWeights = &inputToForgetWeights;
+ params.m_InputToCellWeights = &inputToCellWeights;
+ params.m_InputToOutputWeights = &inputToOutputWeights;
+ params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
+ params.m_RecurrentToCellWeights = &recurrentToCellWeights;
+ params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
+ params.m_ForgetGateBias = &forgetGateBias;
+ params.m_CellBias = &cellBias;
+ params.m_OutputGateBias = &outputGateBias;
+
+ // additional params because: descriptor.m_CifgEnabled = false
+ params.m_InputToInputWeights = &inputToInputWeights;
+ params.m_RecurrentToInputWeights = &recurrentToInputWeights;
+ params.m_CellToInputWeights = &cellToInputWeights;
+ params.m_InputGateBias = &inputGateBias;
+
+ // additional params because: descriptor.m_ProjectionEnabled = true
+ params.m_ProjectionWeights = &projectionWeights;
+ params.m_ProjectionBias = &projectionBias;
+
+ // additional params because: descriptor.m_PeepholeEnabled = true
+ params.m_CellToForgetWeights = &cellToForgetWeights;
+ params.m_CellToOutputWeights = &cellToOutputWeights;
+
+ armnn::INetworkPtr network = armnn::INetwork::Create();
+ armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
+ armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(1);
+ armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(2);
+ const std::string layerName("lstm");
+ armnn::IConnectableLayer* const lstmLayer = network->AddLstmLayer(descriptor, params, layerName.c_str());
+ armnn::IConnectableLayer* const scratchBuffer = network->AddOutputLayer(0);
+ armnn::IConnectableLayer* const outputStateOut = network->AddOutputLayer(1);
+ armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(2);
+ armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(3);
+
+ // connect up
+ armnn::TensorInfo inputTensorInfo({ batchSize, inputSize }, armnn::DataType::Float32);
+ armnn::TensorInfo cellStateTensorInfo({ batchSize, numUnits}, armnn::DataType::Float32);
+ armnn::TensorInfo outputStateTensorInfo({ batchSize, outputSize }, armnn::DataType::Float32);
+ armnn::TensorInfo lstmTensorInfoScratchBuff({ batchSize, numUnits * 4 }, armnn::DataType::Float32);
+
+ inputLayer->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(0));
+ inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
+
+ outputStateIn->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(1));
+ outputStateIn->GetOutputSlot(0).SetTensorInfo(outputStateTensorInfo);
+
+ cellStateIn->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(2));
+ cellStateIn->GetOutputSlot(0).SetTensorInfo(cellStateTensorInfo);
+
+ lstmLayer->GetOutputSlot(0).Connect(scratchBuffer->GetInputSlot(0));
+ lstmLayer->GetOutputSlot(0).SetTensorInfo(lstmTensorInfoScratchBuff);
+
+ lstmLayer->GetOutputSlot(1).Connect(outputStateOut->GetInputSlot(0));
+ lstmLayer->GetOutputSlot(1).SetTensorInfo(outputStateTensorInfo);
+
+ lstmLayer->GetOutputSlot(2).Connect(cellStateOut->GetInputSlot(0));
+ lstmLayer->GetOutputSlot(2).SetTensorInfo(cellStateTensorInfo);
+
+ lstmLayer->GetOutputSlot(3).Connect(outputLayer->GetInputSlot(0));
+ lstmLayer->GetOutputSlot(3).SetTensorInfo(outputStateTensorInfo);
+
+ armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
+ BOOST_CHECK(deserializedNetwork);
+
+ VerifyLstmLayer<armnn::LstmDescriptor> checker(
+ layerName,
+ {inputTensorInfo, outputStateTensorInfo, cellStateTensorInfo},
+ {lstmTensorInfoScratchBuff, outputStateTensorInfo, cellStateTensorInfo, outputStateTensorInfo},
+ descriptor,
+ params);
+ deserializedNetwork->ExecuteStrategy(checker);
+}
+
+BOOST_AUTO_TEST_CASE(SerializeDeserializeLstmNoCifgWithPeepholeWithProjectionWithLayerNorm)
+{
+ armnn::LstmDescriptor descriptor;
+ descriptor.m_ActivationFunc = 4;
+ descriptor.m_ClippingThresProj = 0.0f;
+ descriptor.m_ClippingThresCell = 0.0f;
+ descriptor.m_CifgEnabled = false; // if this is true then we DON'T need to set the OptCifgParams
+ descriptor.m_ProjectionEnabled = true;
+ descriptor.m_PeepholeEnabled = true;
+ descriptor.m_LayerNormEnabled = true;
+
+ const uint32_t batchSize = 2;
+ const uint32_t inputSize = 5;
+ const uint32_t numUnits = 20;
+ const uint32_t outputSize = 16;
+
+ armnn::TensorInfo tensorInfo20x5({numUnits, inputSize}, armnn::DataType::Float32);
+ std::vector<float> inputToInputWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
+ armnn::ConstTensor inputToInputWeights(tensorInfo20x5, inputToInputWeightsData);
+
+ std::vector<float> inputToForgetWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
+ armnn::ConstTensor inputToForgetWeights(tensorInfo20x5, inputToForgetWeightsData);
+
+ std::vector<float> inputToCellWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
+ armnn::ConstTensor inputToCellWeights(tensorInfo20x5, inputToCellWeightsData);
+
+ std::vector<float> inputToOutputWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
+ armnn::ConstTensor inputToOutputWeights(tensorInfo20x5, inputToOutputWeightsData);
+
+ armnn::TensorInfo tensorInfo20({numUnits}, armnn::DataType::Float32);
+ std::vector<float> inputGateBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
+ armnn::ConstTensor inputGateBias(tensorInfo20, inputGateBiasData);
+
+ std::vector<float> forgetGateBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
+ armnn::ConstTensor forgetGateBias(tensorInfo20, forgetGateBiasData);
+
+ std::vector<float> cellBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
+ armnn::ConstTensor cellBias(tensorInfo20, cellBiasData);
+
+ std::vector<float> outputGateBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
+ armnn::ConstTensor outputGateBias(tensorInfo20, outputGateBiasData);
+
+ armnn::TensorInfo tensorInfo20x16({numUnits, outputSize}, armnn::DataType::Float32);
+ std::vector<float> recurrentToInputWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
+ armnn::ConstTensor recurrentToInputWeights(tensorInfo20x16, recurrentToInputWeightsData);
+
+ std::vector<float> recurrentToForgetWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
+ armnn::ConstTensor recurrentToForgetWeights(tensorInfo20x16, recurrentToForgetWeightsData);
+
+ std::vector<float> recurrentToCellWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
+ armnn::ConstTensor recurrentToCellWeights(tensorInfo20x16, recurrentToCellWeightsData);
+
+ std::vector<float> recurrentToOutputWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
+ armnn::ConstTensor recurrentToOutputWeights(tensorInfo20x16, recurrentToOutputWeightsData);
+
+ std::vector<float> cellToInputWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
+ armnn::ConstTensor cellToInputWeights(tensorInfo20, cellToInputWeightsData);
+
+ std::vector<float> cellToForgetWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
+ armnn::ConstTensor cellToForgetWeights(tensorInfo20, cellToForgetWeightsData);
+
+ std::vector<float> cellToOutputWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
+ armnn::ConstTensor cellToOutputWeights(tensorInfo20, cellToOutputWeightsData);
+
+ armnn::TensorInfo tensorInfo16x20({outputSize, numUnits}, armnn::DataType::Float32);
+ std::vector<float> projectionWeightsData = GenerateRandomData<float>(tensorInfo16x20.GetNumElements());
+ armnn::ConstTensor projectionWeights(tensorInfo16x20, projectionWeightsData);
+
+ armnn::TensorInfo tensorInfo16({outputSize}, armnn::DataType::Float32);
+ std::vector<float> projectionBiasData(outputSize, 0.f);
+ armnn::ConstTensor projectionBias(tensorInfo16, projectionBiasData);
+
+ std::vector<float> inputLayerNormWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
+ armnn::ConstTensor inputLayerNormWeights(tensorInfo20, forgetGateBiasData);
+
+ std::vector<float> forgetLayerNormWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
+ armnn::ConstTensor forgetLayerNormWeights(tensorInfo20, forgetGateBiasData);
+
+ std::vector<float> cellLayerNormWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
+ armnn::ConstTensor cellLayerNormWeights(tensorInfo20, forgetGateBiasData);
+
+ std::vector<float> outLayerNormWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
+ armnn::ConstTensor outLayerNormWeights(tensorInfo20, forgetGateBiasData);
+
+ armnn::LstmInputParams params;
+ params.m_InputToForgetWeights = &inputToForgetWeights;
+ params.m_InputToCellWeights = &inputToCellWeights;
+ params.m_InputToOutputWeights = &inputToOutputWeights;
+ params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
+ params.m_RecurrentToCellWeights = &recurrentToCellWeights;
+ params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
+ params.m_ForgetGateBias = &forgetGateBias;
+ params.m_CellBias = &cellBias;
+ params.m_OutputGateBias = &outputGateBias;
+
+ // additional params because: descriptor.m_CifgEnabled = false
+ params.m_InputToInputWeights = &inputToInputWeights;
+ params.m_RecurrentToInputWeights = &recurrentToInputWeights;
+ params.m_CellToInputWeights = &cellToInputWeights;
+ params.m_InputGateBias = &inputGateBias;
+
+ // additional params because: descriptor.m_ProjectionEnabled = true
+ params.m_ProjectionWeights = &projectionWeights;
+ params.m_ProjectionBias = &projectionBias;
+
+ // additional params because: descriptor.m_PeepholeEnabled = true
+ params.m_CellToForgetWeights = &cellToForgetWeights;
+ params.m_CellToOutputWeights = &cellToOutputWeights;
+
+ // additional params because: despriptor.m_LayerNormEnabled = true
+ params.m_InputLayerNormWeights = &inputLayerNormWeights;
+ params.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
+ params.m_CellLayerNormWeights = &cellLayerNormWeights;
+ params.m_OutputLayerNormWeights = &outLayerNormWeights;
+
+ armnn::INetworkPtr network = armnn::INetwork::Create();
+ armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
+ armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(1);
+ armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(2);
+ const std::string layerName("lstm");
+ armnn::IConnectableLayer* const lstmLayer = network->AddLstmLayer(descriptor, params, layerName.c_str());
+ armnn::IConnectableLayer* const scratchBuffer = network->AddOutputLayer(0);
+ armnn::IConnectableLayer* const outputStateOut = network->AddOutputLayer(1);
+ armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(2);
+ armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(3);
+
+ // connect up
+ armnn::TensorInfo inputTensorInfo({ batchSize, inputSize }, armnn::DataType::Float32);
+ armnn::TensorInfo cellStateTensorInfo({ batchSize, numUnits}, armnn::DataType::Float32);
+ armnn::TensorInfo outputStateTensorInfo({ batchSize, outputSize }, armnn::DataType::Float32);
+ armnn::TensorInfo lstmTensorInfoScratchBuff({ batchSize, numUnits * 4 }, armnn::DataType::Float32);
+
+ inputLayer->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(0));
+ inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
+
+ outputStateIn->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(1));
+ outputStateIn->GetOutputSlot(0).SetTensorInfo(outputStateTensorInfo);
+
+ cellStateIn->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(2));
+ cellStateIn->GetOutputSlot(0).SetTensorInfo(cellStateTensorInfo);
+
+ lstmLayer->GetOutputSlot(0).Connect(scratchBuffer->GetInputSlot(0));
+ lstmLayer->GetOutputSlot(0).SetTensorInfo(lstmTensorInfoScratchBuff);
+
+ lstmLayer->GetOutputSlot(1).Connect(outputStateOut->GetInputSlot(0));
+ lstmLayer->GetOutputSlot(1).SetTensorInfo(outputStateTensorInfo);
+
+ lstmLayer->GetOutputSlot(2).Connect(cellStateOut->GetInputSlot(0));
+ lstmLayer->GetOutputSlot(2).SetTensorInfo(cellStateTensorInfo);
+
+ lstmLayer->GetOutputSlot(3).Connect(outputLayer->GetInputSlot(0));
+ lstmLayer->GetOutputSlot(3).SetTensorInfo(outputStateTensorInfo);
+
+ armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
+ BOOST_CHECK(deserializedNetwork);
+
+ VerifyLstmLayer<armnn::LstmDescriptor> checker(
+ layerName,
+ {inputTensorInfo, outputStateTensorInfo, cellStateTensorInfo},
+ {lstmTensorInfoScratchBuff, outputStateTensorInfo, cellStateTensorInfo, outputStateTensorInfo},
+ descriptor,
+ params);
+ deserializedNetwork->ExecuteStrategy(checker);
+}
+
+BOOST_AUTO_TEST_CASE(EnsureLstmLayersBackwardCompatibility)
+{
+ // The hex data below is a flat buffer containing a lstm layer with no Cifg, with peephole and projection
+ // enabled. That data was obtained before additional layer normalization parameters where added to the
+ // lstm serializer. That way it can be tested if a lstm model with the old parameter configuration can
+ // still be loaded
+ const std::vector<uint8_t> lstmNoCifgWithPeepholeAndProjectionModel =
+ {
+ 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00,
+ 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x2C, 0x00, 0x00, 0x00, 0x38, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00,
+ 0xDC, 0x29, 0x00, 0x00, 0x38, 0x29, 0x00, 0x00, 0xB4, 0x28, 0x00, 0x00, 0x94, 0x01, 0x00, 0x00, 0x3C, 0x01,
+ 0x00, 0x00, 0xE0, 0x00, 0x00, 0x00, 0x84, 0x00, 0x00, 0x00, 0x28, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x04, 0x00,
+ 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00, 0x70, 0xD6, 0xFF, 0xFF,
+ 0x00, 0x00, 0x00, 0x0B, 0x04, 0x00, 0x00, 0x00, 0x06, 0xD7, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x88, 0xD7,
+ 0xFF, 0xFF, 0x08, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0xF6, 0xD6, 0xFF, 0xFF, 0x07, 0x00, 0x00, 0x00,
+ 0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0xE8, 0xD7, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0xC8, 0xD6, 0xFF, 0xFF, 0x00, 0x00,
+ 0x00, 0x0B, 0x04, 0x00, 0x00, 0x00, 0x5E, 0xD7, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0xE0, 0xD7, 0xFF, 0xFF,
+ 0x08, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x4E, 0xD7, 0xFF, 0xFF, 0x06, 0x00, 0x00, 0x00, 0x10, 0x00,
+ 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0xD8,
+ 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x20, 0xD7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B,
+ 0x04, 0x00, 0x00, 0x00, 0xB6, 0xD7, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x38, 0xD8, 0xFF, 0xFF, 0x08, 0x00,
+ 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0xA6, 0xD7, 0xFF, 0xFF, 0x05, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,
+ 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x98, 0xD8, 0xFF, 0xFF,
+ 0x03, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x78, 0xD7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B, 0x04, 0x00,
+ 0x00, 0x00, 0x0E, 0xD8, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x16, 0xD8, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00,
+ 0xFA, 0xD7, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00,
+ 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
+ 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xEC, 0xD8, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x6C, 0xD8, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x23, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00,
+ 0x12, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00, 0x00, 0x00, 0xE0, 0x25, 0x00, 0x00, 0xD0, 0x25,
+ 0x00, 0x00, 0x2C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x26, 0x00, 0x48, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00,
+ 0x10, 0x00, 0x14, 0x00, 0x18, 0x00, 0x1C, 0x00, 0x20, 0x00, 0x24, 0x00, 0x28, 0x00, 0x2C, 0x00, 0x30, 0x00,
+ 0x34, 0x00, 0x38, 0x00, 0x3C, 0x00, 0x40, 0x00, 0x44, 0x00, 0x26, 0x00, 0x00, 0x00, 0xC4, 0x23, 0x00, 0x00,
+ 0xF8, 0x21, 0x00, 0x00, 0x2C, 0x20, 0x00, 0x00, 0xF0, 0x1A, 0x00, 0x00, 0xB4, 0x15, 0x00, 0x00, 0x78, 0x10,
+ 0x00, 0x00, 0xF0, 0x0F, 0x00, 0x00, 0x68, 0x0F, 0x00, 0x00, 0xE0, 0x0E, 0x00, 0x00, 0x14, 0x0D, 0x00, 0x00,
+ 0xD8, 0x07, 0x00, 0x00, 0x50, 0x07, 0x00, 0x00, 0xC8, 0x06, 0x00, 0x00, 0x8C, 0x01, 0x00, 0x00, 0x14, 0x01,
+ 0x00, 0x00, 0x8C, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xEE, 0xD7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03,
+ 0x64, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xFE, 0xD8, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x14, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x5A, 0xD8, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01,
+ 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x72, 0xD8,
+ 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x64, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x82, 0xD9, 0xFF, 0xFF,
+ 0x04, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xDE, 0xD8,
+ 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
+ 0x14, 0x00, 0x00, 0x00, 0xF6, 0xD8, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x54, 0x00, 0x00, 0x00, 0x04, 0x00,
+ 0x00, 0x00, 0x06, 0xDA, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x52, 0xD9, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x6A, 0xD9, 0xFF, 0xFF, 0x00, 0x00,
+ 0x00, 0x03, 0x14, 0x05, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x7A, 0xDA, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00,
+ 0x40, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x86, 0xDE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0xA2, 0xDE,
+ 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x64, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xB2, 0xDF, 0xFF, 0xFF,
+ 0x04, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0xDF,
+ 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
+ 0x14, 0x00, 0x00, 0x00, 0x26, 0xDF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x64, 0x00, 0x00, 0x00, 0x04, 0x00,
+ 0x00, 0x00, 0x36, 0xE0, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x92, 0xDF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0xAA, 0xDF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03,
+ 0x14, 0x05, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xBA, 0xE0, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x40, 0x01,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0xC6, 0xE4, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0xE2, 0xE4, 0xFF, 0xFF,
+ 0x00, 0x00, 0x00, 0x03, 0xA4, 0x01, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xF2, 0xE5, 0xFF, 0xFF, 0x04, 0x00,
+ 0x00, 0x00, 0x64, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x8E, 0xE6, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01,
+ 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x05, 0x00,
+ 0x00, 0x00, 0xAA, 0xE6, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x64, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
+ 0xBA, 0xE7, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x16, 0xE7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x01, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x2E, 0xE7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x64, 0x00,
+ 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x3E, 0xE8, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x9A, 0xE7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0xB2, 0xE7, 0xFF, 0xFF,
+ 0x00, 0x00, 0x00, 0x03, 0x64, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xC2, 0xE8, 0xFF, 0xFF, 0x04, 0x00,
+ 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x1E, 0xE8, 0xFF, 0xFF,
+ 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x14, 0x00,
+ 0x00, 0x00, 0x36, 0xE8, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x14, 0x05, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
+ 0x46, 0xE9, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x40, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x52, 0xED, 0xFF, 0xFF,
+ 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00,
+ 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x6E, 0xED, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x14, 0x05, 0x00, 0x00,
+ 0x04, 0x00, 0x00, 0x00, 0x7E, 0xEE, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x40, 0x01, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x8A, 0xF2, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00,
+ 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0xA6, 0xF2, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03,
+ 0x14, 0x05, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xB6, 0xF3, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x40, 0x01,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0xC2, 0xF7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0xDE, 0xF7, 0xFF, 0xFF,
+ 0x00, 0x00, 0x00, 0x03, 0xA4, 0x01, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xEE, 0xF8, 0xFF, 0xFF, 0x04, 0x00,
+ 0x00, 0x00, 0x64, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x8A, 0xF9, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01,
+ 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x05, 0x00,
+ 0x00, 0x00, 0xA6, 0xF9, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0xA4, 0x01, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
+ 0xB6, 0xFA, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x64, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x52, 0xFB,
+ 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
+ 0x14, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x6E, 0xFB, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0xA4, 0x01,
+ 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x7E, 0xFC, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x64, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x1A, 0xFD, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x10, 0x00, 0x0C, 0x00,
+ 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x05, 0x00, 0x06, 0x00, 0x07, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x01, 0x01, 0x04, 0x00, 0x00, 0x00, 0x2E, 0xFE, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,
+ 0x22, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x6C, 0x73,
+ 0x74, 0x6D, 0x00, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0xEC, 0x00, 0x00, 0x00, 0xD0, 0x00, 0x00, 0x00,
+ 0xB4, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x88, 0x00, 0x00, 0x00, 0x5C, 0x00, 0x00, 0x00, 0x30, 0x00,
+ 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x14, 0xFF, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
+ 0xA6, 0xFD, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00,
+ 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x3C, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00,
+ 0x04, 0x00, 0x00, 0x00, 0xCE, 0xFD, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x64, 0xFF, 0xFF, 0xFF,
+ 0x01, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xF6, 0xFD, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,
+ 0xB4, 0xFE, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x1A, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x50, 0x00, 0x00, 0x00,
+ 0xF0, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00,
+ 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xE8, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x09, 0x04, 0x00, 0x00, 0x00,
+ 0x7E, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00,
+ 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x76, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00,
+ 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
+ 0x68, 0xFF, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0xCE, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,
+ 0x08, 0x00, 0x0E, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x0C, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x08, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00,
+ 0x08, 0x00, 0x0E, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x01, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x14, 0x00,
+ 0x0E, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00,
+ 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00,
+ 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x6E, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x08, 0x00,
+ 0x0C, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x04, 0x00, 0x00, 0x00,
+ 0xF6, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x0A, 0x00, 0x04, 0x00, 0x06, 0x00,
+ 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x14, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00,
+ 0x0C, 0x00, 0x10, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00,
+ 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00,
+ 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x08, 0x00, 0x07, 0x00, 0x0C, 0x00,
+ 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00,
+ 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x00
+ };
+
+ armnn::INetworkPtr deserializedNetwork =
+ DeserializeNetwork(std::string(lstmNoCifgWithPeepholeAndProjectionModel.begin(),
+ lstmNoCifgWithPeepholeAndProjectionModel.end()));
+
+ BOOST_CHECK(deserializedNetwork);
+
+ // generating the same model parameters which where used to serialize the model (Layer norm is not specified)
+ armnn::LstmDescriptor descriptor;
+ descriptor.m_ActivationFunc = 4;
+ descriptor.m_ClippingThresProj = 0.0f;
+ descriptor.m_ClippingThresCell = 0.0f;
+ descriptor.m_CifgEnabled = false;
+ descriptor.m_ProjectionEnabled = true;
+ descriptor.m_PeepholeEnabled = true;
+
+ const uint32_t batchSize = 2u;
+ const uint32_t inputSize = 5u;
+ const uint32_t numUnits = 20u;
+ const uint32_t outputSize = 16u;
+
+ armnn::TensorInfo tensorInfo20x5({numUnits, inputSize}, armnn::DataType::Float32);
+ std::vector<float> inputToInputWeightsData(tensorInfo20x5.GetNumElements(), 0.0f);
+ armnn::ConstTensor inputToInputWeights(tensorInfo20x5, inputToInputWeightsData);
+
+ std::vector<float> inputToForgetWeightsData(tensorInfo20x5.GetNumElements(), 0.0f);
+ armnn::ConstTensor inputToForgetWeights(tensorInfo20x5, inputToForgetWeightsData);
+
+ std::vector<float> inputToCellWeightsData(tensorInfo20x5.GetNumElements(), 0.0f);
+ armnn::ConstTensor inputToCellWeights(tensorInfo20x5, inputToCellWeightsData);
+
+ std::vector<float> inputToOutputWeightsData(tensorInfo20x5.GetNumElements(), 0.0f);
+ armnn::ConstTensor inputToOutputWeights(tensorInfo20x5, inputToOutputWeightsData);
+
+ armnn::TensorInfo tensorInfo20({numUnits}, armnn::DataType::Float32);
+ std::vector<float> inputGateBiasData(tensorInfo20.GetNumElements(), 0.0f);
+ armnn::ConstTensor inputGateBias(tensorInfo20, inputGateBiasData);
+
+ std::vector<float> forgetGateBiasData(tensorInfo20.GetNumElements(), 0.0f);
+ armnn::ConstTensor forgetGateBias(tensorInfo20, forgetGateBiasData);
+
+ std::vector<float> cellBiasData(tensorInfo20.GetNumElements(), 0.0f);
+ armnn::ConstTensor cellBias(tensorInfo20, cellBiasData);
+
+ std::vector<float> outputGateBiasData(tensorInfo20.GetNumElements(), 0.0f);
+ armnn::ConstTensor outputGateBias(tensorInfo20, outputGateBiasData);
+
+ armnn::TensorInfo tensorInfo20x16({numUnits, outputSize}, armnn::DataType::Float32);
+ std::vector<float> recurrentToInputWeightsData(tensorInfo20x16.GetNumElements(), 0.0f);
+ armnn::ConstTensor recurrentToInputWeights(tensorInfo20x16, recurrentToInputWeightsData);
+
+ std::vector<float> recurrentToForgetWeightsData(tensorInfo20x16.GetNumElements(), 0.0f);
+ armnn::ConstTensor recurrentToForgetWeights(tensorInfo20x16, recurrentToForgetWeightsData);
+
+ std::vector<float> recurrentToCellWeightsData(tensorInfo20x16.GetNumElements(), 0.0f);
+ armnn::ConstTensor recurrentToCellWeights(tensorInfo20x16, recurrentToCellWeightsData);
+
+ std::vector<float> recurrentToOutputWeightsData(tensorInfo20x16.GetNumElements(), 0.0f);
+ armnn::ConstTensor recurrentToOutputWeights(tensorInfo20x16, recurrentToOutputWeightsData);
+
+ std::vector<float> cellToInputWeightsData(tensorInfo20.GetNumElements(), 0.0f);
+ armnn::ConstTensor cellToInputWeights(tensorInfo20, cellToInputWeightsData);
+
+ std::vector<float> cellToForgetWeightsData(tensorInfo20.GetNumElements(), 0.0f);
+ armnn::ConstTensor cellToForgetWeights(tensorInfo20, cellToForgetWeightsData);
+
+ std::vector<float> cellToOutputWeightsData(tensorInfo20.GetNumElements(), 0.0f);
+ armnn::ConstTensor cellToOutputWeights(tensorInfo20, cellToOutputWeightsData);
+
+ armnn::TensorInfo tensorInfo16x20({outputSize, numUnits}, armnn::DataType::Float32);
+ std::vector<float> projectionWeightsData(tensorInfo16x20.GetNumElements(), 0.0f);
+ armnn::ConstTensor projectionWeights(tensorInfo16x20, projectionWeightsData);
+
+ armnn::TensorInfo tensorInfo16({outputSize}, armnn::DataType::Float32);
+ std::vector<float> projectionBiasData(outputSize, 0.0f);
+ armnn::ConstTensor projectionBias(tensorInfo16, projectionBiasData);
+
+ armnn::LstmInputParams params;
+ params.m_InputToForgetWeights = &inputToForgetWeights;
+ params.m_InputToCellWeights = &inputToCellWeights;
+ params.m_InputToOutputWeights = &inputToOutputWeights;
+ params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
+ params.m_RecurrentToCellWeights = &recurrentToCellWeights;
+ params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
+ params.m_ForgetGateBias = &forgetGateBias;
+ params.m_CellBias = &cellBias;
+ params.m_OutputGateBias = &outputGateBias;
+
+ // additional params because: descriptor.m_CifgEnabled = false
+ params.m_InputToInputWeights = &inputToInputWeights;
+ params.m_RecurrentToInputWeights = &recurrentToInputWeights;
+ params.m_CellToInputWeights = &cellToInputWeights;
+ params.m_InputGateBias = &inputGateBias;
+
+ // additional params because: descriptor.m_ProjectionEnabled = true
+ params.m_ProjectionWeights = &projectionWeights;
+ params.m_ProjectionBias = &projectionBias;
+
+ // additional params because: descriptor.m_PeepholeEnabled = true
+ params.m_CellToForgetWeights = &cellToForgetWeights;
+ params.m_CellToOutputWeights = &cellToOutputWeights;
+
+ const std::string layerName("lstm");
+ armnn::TensorInfo inputTensorInfo({ batchSize, inputSize }, armnn::DataType::Float32);
+ armnn::TensorInfo cellStateTensorInfo({ batchSize, numUnits}, armnn::DataType::Float32);
+ armnn::TensorInfo outputStateTensorInfo({ batchSize, outputSize }, armnn::DataType::Float32);
+ armnn::TensorInfo lstmTensorInfoScratchBuff({ batchSize, numUnits * 4 }, armnn::DataType::Float32);
+
+ VerifyLstmLayer<armnn::LstmDescriptor> checker(
+ layerName,
+ {inputTensorInfo, outputStateTensorInfo, cellStateTensorInfo},
+ {lstmTensorInfoScratchBuff, outputStateTensorInfo, cellStateTensorInfo, outputStateTensorInfo},
+ descriptor,
+ params);
+ deserializedNetwork->ExecuteStrategy(checker);
+}
+
+// Rebuilds a QuantizedLstmInputParams struct from the flat constants vector
+// that ExecuteStrategy hands to a layer visitor. The positional order here
+// must match the order in which the serializer emits the QuantizedLstm
+// constants: the four input weights, the four recurrent weights, then the
+// four gate biases.
+// NOTE(review): the returned struct holds pointers into 'constants'; the
+// caller must keep that vector alive for as long as the params are used.
+armnn::QuantizedLstmInputParams ConstantsVector2QuantizedLstmInputParams(
+    const std::vector<armnn::ConstTensor>& constants)
+{
+    armnn::QuantizedLstmInputParams params;
+
+    // index for constants vector
+    size_t i = 0;
+
+    // Get input parameters
+    params.m_InputToInputWeights = &constants[i++];
+    params.m_InputToForgetWeights = &constants[i++];
+    params.m_InputToCellWeights = &constants[i++];
+    params.m_InputToOutputWeights = &constants[i++];
+
+    params.m_RecurrentToInputWeights = &constants[i++];
+    params.m_RecurrentToForgetWeights = &constants[i++];
+    params.m_RecurrentToCellWeights = &constants[i++];
+    params.m_RecurrentToOutputWeights = &constants[i++];
+
+    params.m_InputGateBias = &constants[i++];
+    params.m_ForgetGateBias = &constants[i++];
+    params.m_CellBias = &constants[i++];
+    params.m_OutputGateBias = &constants[i++];
+
+    return params;
+}
+
+// Strategy-based verifier for a deserialized QuantizedLstm layer: checks the
+// layer's name and connections, then compares every weight/bias constant the
+// deserializer produced against the parameters the test originally serialized.
+// Input and Output layers are accepted silently; any other layer type in the
+// model is an error.
+class VerifyQuantizedLstmLayer : public LayerVerifierBase
+{
+
+public:
+    VerifyQuantizedLstmLayer(const std::string& layerName,
+                             const std::vector<armnn::TensorInfo>& inputInfos,
+                             const std::vector<armnn::TensorInfo>& outputInfos,
+                             const armnn::QuantizedLstmInputParams& inputParams)
+        : LayerVerifierBase(layerName, inputInfos, outputInfos), m_InputParams(inputParams) {}
+
+    void ExecuteStrategy(const armnn::IConnectableLayer* layer,
+                         const armnn::BaseDescriptor& descriptor,
+                         const std::vector<armnn::ConstTensor>& constants,
+                         const char* name,
+                         const armnn::LayerBindingId id = 0) override
+    {
+        armnn::IgnoreUnused(descriptor, constants, id);
+        switch (layer->GetType())
+        {
+            case armnn::LayerType::Input: break;
+            case armnn::LayerType::Output: break;
+            case armnn::LayerType::QuantizedLstm:
+            {
+                VerifyNameAndConnections(layer, name);
+                // Map the flat constants vector back onto named LSTM parameters
+                // before comparing against the expected values.
+                armnn::QuantizedLstmInputParams params = ConstantsVector2QuantizedLstmInputParams(constants);
+                VerifyInputParameters(params);
+                break;
+            }
+            default:
+            {
+                // Fixed: the format string was missing its "{}" placeholder, so
+                // the offending layer's name never appeared in the message.
+                throw armnn::Exception(fmt::format("Unexpected layer type in QuantizedLstm test model: {}",
+                                                   layer->GetName()));
+            }
+        }
+    }
+
+protected:
+    // Compares each deserialized constant tensor against the expected one;
+    // the label is used in the failure message when a mismatch is reported.
+    void VerifyInputParameters(const armnn::QuantizedLstmInputParams& params)
+    {
+        VerifyConstTensors("m_InputToInputWeights",
+                           m_InputParams.m_InputToInputWeights, params.m_InputToInputWeights);
+        VerifyConstTensors("m_InputToForgetWeights",
+                           m_InputParams.m_InputToForgetWeights, params.m_InputToForgetWeights);
+        VerifyConstTensors("m_InputToCellWeights",
+                           m_InputParams.m_InputToCellWeights, params.m_InputToCellWeights);
+        VerifyConstTensors("m_InputToOutputWeights",
+                           m_InputParams.m_InputToOutputWeights, params.m_InputToOutputWeights);
+        VerifyConstTensors("m_RecurrentToInputWeights",
+                           m_InputParams.m_RecurrentToInputWeights, params.m_RecurrentToInputWeights);
+        VerifyConstTensors("m_RecurrentToForgetWeights",
+                           m_InputParams.m_RecurrentToForgetWeights, params.m_RecurrentToForgetWeights);
+        VerifyConstTensors("m_RecurrentToCellWeights",
+                           m_InputParams.m_RecurrentToCellWeights, params.m_RecurrentToCellWeights);
+        VerifyConstTensors("m_RecurrentToOutputWeights",
+                           m_InputParams.m_RecurrentToOutputWeights, params.m_RecurrentToOutputWeights);
+        VerifyConstTensors("m_InputGateBias",
+                           m_InputParams.m_InputGateBias, params.m_InputGateBias);
+        VerifyConstTensors("m_ForgetGateBias",
+                           m_InputParams.m_ForgetGateBias, params.m_ForgetGateBias);
+        VerifyConstTensors("m_CellBias",
+                           m_InputParams.m_CellBias, params.m_CellBias);
+        VerifyConstTensors("m_OutputGateBias",
+                           m_InputParams.m_OutputGateBias, params.m_OutputGateBias);
+    }
+
+private:
+    // Expected parameters captured at construction; compared against the
+    // deserialized network's constants.
+    armnn::QuantizedLstmInputParams m_InputParams;
+};
+
+// Round-trip test: builds a network containing a single QuantizedLstm layer
+// with fully specified (QAsymmU8 weights / Signed32 biases) parameters,
+// serializes it, deserializes it, and verifies the layer name, connections
+// and every constant tensor survive the round trip unchanged.
+BOOST_AUTO_TEST_CASE(SerializeDeserializeQuantizedLstm)
+{
+    const uint32_t batchSize = 1;
+    const uint32_t inputSize = 2;
+    const uint32_t numUnits = 4;
+    const uint32_t outputSize = numUnits;
+
+    // Scale/Offset for input/output, cellState In/Out, weights, bias
+    float inputOutputScale = 0.0078125f;
+    int32_t inputOutputOffset = 128;
+
+    float cellStateScale = 0.00048828125f;
+    int32_t cellStateOffset = 0;
+
+    float weightsScale = 0.00408021f;
+    int32_t weightsOffset = 100;
+
+    float biasScale = 3.1876640625e-05f;
+    int32_t biasOffset = 0;
+
+    // The shape of weight data is {outputSize, inputSize} = {4, 2}
+    armnn::TensorShape inputToInputWeightsShape = {4, 2};
+    std::vector<uint8_t> inputToInputWeightsData = {1, 2, 3, 4, 5, 6, 7, 8};
+    armnn::TensorInfo inputToInputWeightsInfo(inputToInputWeightsShape,
+                                              armnn::DataType::QAsymmU8,
+                                              weightsScale,
+                                              weightsOffset);
+    armnn::ConstTensor inputToInputWeights(inputToInputWeightsInfo, inputToInputWeightsData);
+
+    armnn::TensorShape inputToForgetWeightsShape = {4, 2};
+    std::vector<uint8_t> inputToForgetWeightsData = {1, 2, 3, 4, 5, 6, 7, 8};
+    armnn::TensorInfo inputToForgetWeightsInfo(inputToForgetWeightsShape,
+                                               armnn::DataType::QAsymmU8,
+                                               weightsScale,
+                                               weightsOffset);
+    armnn::ConstTensor inputToForgetWeights(inputToForgetWeightsInfo, inputToForgetWeightsData);
+
+    armnn::TensorShape inputToCellWeightsShape = {4, 2};
+    std::vector<uint8_t> inputToCellWeightsData = {1, 2, 3, 4, 5, 6, 7, 8};
+    armnn::TensorInfo inputToCellWeightsInfo(inputToCellWeightsShape,
+                                             armnn::DataType::QAsymmU8,
+                                             weightsScale,
+                                             weightsOffset);
+    armnn::ConstTensor inputToCellWeights(inputToCellWeightsInfo, inputToCellWeightsData);
+
+    armnn::TensorShape inputToOutputWeightsShape = {4, 2};
+    std::vector<uint8_t> inputToOutputWeightsData = {1, 2, 3, 4, 5, 6, 7, 8};
+    armnn::TensorInfo inputToOutputWeightsInfo(inputToOutputWeightsShape,
+                                               armnn::DataType::QAsymmU8,
+                                               weightsScale,
+                                               weightsOffset);
+    armnn::ConstTensor inputToOutputWeights(inputToOutputWeightsInfo, inputToOutputWeightsData);
+
+    // The shape of recurrent weight data is {outputSize, outputSize} = {4, 4}
+    armnn::TensorShape recurrentToInputWeightsShape = {4, 4};
+    std::vector<uint8_t> recurrentToInputWeightsData = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};
+    armnn::TensorInfo recurrentToInputWeightsInfo(recurrentToInputWeightsShape,
+                                                  armnn::DataType::QAsymmU8,
+                                                  weightsScale,
+                                                  weightsOffset);
+    armnn::ConstTensor recurrentToInputWeights(recurrentToInputWeightsInfo, recurrentToInputWeightsData);
+
+    armnn::TensorShape recurrentToForgetWeightsShape = {4, 4};
+    std::vector<uint8_t> recurrentToForgetWeightsData = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};
+    armnn::TensorInfo recurrentToForgetWeightsInfo(recurrentToForgetWeightsShape,
+                                                   armnn::DataType::QAsymmU8,
+                                                   weightsScale,
+                                                   weightsOffset);
+    armnn::ConstTensor recurrentToForgetWeights(recurrentToForgetWeightsInfo, recurrentToForgetWeightsData);
+
+    armnn::TensorShape recurrentToCellWeightsShape = {4, 4};
+    std::vector<uint8_t> recurrentToCellWeightsData = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};
+    armnn::TensorInfo recurrentToCellWeightsInfo(recurrentToCellWeightsShape,
+                                                 armnn::DataType::QAsymmU8,
+                                                 weightsScale,
+                                                 weightsOffset);
+    armnn::ConstTensor recurrentToCellWeights(recurrentToCellWeightsInfo, recurrentToCellWeightsData);
+
+    armnn::TensorShape recurrentToOutputWeightsShape = {4, 4};
+    std::vector<uint8_t> recurrentToOutputWeightsData = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};
+    armnn::TensorInfo recurrentToOutputWeightsInfo(recurrentToOutputWeightsShape,
+                                                   armnn::DataType::QAsymmU8,
+                                                   weightsScale,
+                                                   weightsOffset);
+    armnn::ConstTensor recurrentToOutputWeights(recurrentToOutputWeightsInfo, recurrentToOutputWeightsData);
+
+    // The shape of bias data is {outputSize} = {4}
+    armnn::TensorShape inputGateBiasShape = {4};
+    std::vector<int32_t> inputGateBiasData = {1, 2, 3, 4};
+    armnn::TensorInfo inputGateBiasInfo(inputGateBiasShape,
+                                        armnn::DataType::Signed32,
+                                        biasScale,
+                                        biasOffset);
+    armnn::ConstTensor inputGateBias(inputGateBiasInfo, inputGateBiasData);
+
+    armnn::TensorShape forgetGateBiasShape = {4};
+    std::vector<int32_t> forgetGateBiasData = {1, 2, 3, 4};
+    armnn::TensorInfo forgetGateBiasInfo(forgetGateBiasShape,
+                                         armnn::DataType::Signed32,
+                                         biasScale,
+                                         biasOffset);
+    armnn::ConstTensor forgetGateBias(forgetGateBiasInfo, forgetGateBiasData);
+
+    armnn::TensorShape cellBiasShape = {4};
+    std::vector<int32_t> cellBiasData = {1, 2, 3, 4};
+    armnn::TensorInfo cellBiasInfo(cellBiasShape,
+                                   armnn::DataType::Signed32,
+                                   biasScale,
+                                   biasOffset);
+    armnn::ConstTensor cellBias(cellBiasInfo, cellBiasData);
+
+    armnn::TensorShape outputGateBiasShape = {4};
+    std::vector<int32_t> outputGateBiasData = {1, 2, 3, 4};
+    armnn::TensorInfo outputGateBiasInfo(outputGateBiasShape,
+                                         armnn::DataType::Signed32,
+                                         biasScale,
+                                         biasOffset);
+    armnn::ConstTensor outputGateBias(outputGateBiasInfo, outputGateBiasData);
+
+    // All twelve QuantizedLstm parameters are mandatory.
+    armnn::QuantizedLstmInputParams params;
+    params.m_InputToInputWeights = &inputToInputWeights;
+    params.m_InputToForgetWeights = &inputToForgetWeights;
+    params.m_InputToCellWeights = &inputToCellWeights;
+    params.m_InputToOutputWeights = &inputToOutputWeights;
+    params.m_RecurrentToInputWeights = &recurrentToInputWeights;
+    params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
+    params.m_RecurrentToCellWeights = &recurrentToCellWeights;
+    params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
+    params.m_InputGateBias = &inputGateBias;
+    params.m_ForgetGateBias = &forgetGateBias;
+    params.m_CellBias = &cellBias;
+    params.m_OutputGateBias = &outputGateBias;
+
+    // Build the network: three inputs (input, cellStateIn, outputStateIn),
+    // the QuantizedLstm layer, and two outputs (cellStateOut, output).
+    armnn::INetworkPtr network = armnn::INetwork::Create();
+    armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
+    armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(1);
+    armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(2);
+    const std::string layerName("QuantizedLstm");
+    armnn::IConnectableLayer* const quantizedLstmLayer = network->AddQuantizedLstmLayer(params, layerName.c_str());
+    armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(0);
+    armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(1);
+
+    // Connect up
+    armnn::TensorInfo inputTensorInfo({ batchSize, inputSize },
+                                      armnn::DataType::QAsymmU8,
+                                      inputOutputScale,
+                                      inputOutputOffset);
+    armnn::TensorInfo cellStateTensorInfo({ batchSize, numUnits },
+                                          armnn::DataType::QSymmS16,
+                                          cellStateScale,
+                                          cellStateOffset);
+    armnn::TensorInfo outputStateTensorInfo({ batchSize, outputSize },
+                                            armnn::DataType::QAsymmU8,
+                                            inputOutputScale,
+                                            inputOutputOffset);
+
+    inputLayer->GetOutputSlot(0).Connect(quantizedLstmLayer->GetInputSlot(0));
+    inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
+
+    cellStateIn->GetOutputSlot(0).Connect(quantizedLstmLayer->GetInputSlot(1));
+    cellStateIn->GetOutputSlot(0).SetTensorInfo(cellStateTensorInfo);
+
+    outputStateIn->GetOutputSlot(0).Connect(quantizedLstmLayer->GetInputSlot(2));
+    outputStateIn->GetOutputSlot(0).SetTensorInfo(outputStateTensorInfo);
+
+    quantizedLstmLayer->GetOutputSlot(0).Connect(cellStateOut->GetInputSlot(0));
+    quantizedLstmLayer->GetOutputSlot(0).SetTensorInfo(cellStateTensorInfo);
+
+    quantizedLstmLayer->GetOutputSlot(1).Connect(outputLayer->GetInputSlot(0));
+    quantizedLstmLayer->GetOutputSlot(1).SetTensorInfo(outputStateTensorInfo);
+
+    // Serialize, deserialize, then verify names/connections/constants match.
+    armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
+    BOOST_CHECK(deserializedNetwork);
+
+    VerifyQuantizedLstmLayer checker(layerName,
+                                     {inputTensorInfo, cellStateTensorInfo, outputStateTensorInfo},
+                                     {cellStateTensorInfo, outputStateTensorInfo},
+                                     params);
+
+    deserializedNetwork->ExecuteStrategy(checker);
+}
+
+// Round-trip test for a minimal QLstm layer: CIFG enabled and projection,
+// peephole and layer-norm all disabled, so only the nine mandatory
+// (forget/cell/output) weight and bias tensors are supplied. The network is
+// serialized, deserialized and verified with VerifyLstmLayer.
+BOOST_AUTO_TEST_CASE(SerializeDeserializeQLstmBasic)
+{
+    armnn::QLstmDescriptor descriptor;
+
+    descriptor.m_CifgEnabled = true;
+    descriptor.m_ProjectionEnabled = false;
+    descriptor.m_PeepholeEnabled = false;
+    descriptor.m_LayerNormEnabled = false;
+
+    descriptor.m_CellClip = 0.0f;
+    descriptor.m_ProjectionClip = 0.0f;
+
+    descriptor.m_InputIntermediateScale = 0.00001f;
+    descriptor.m_ForgetIntermediateScale = 0.00001f;
+    descriptor.m_CellIntermediateScale = 0.00001f;
+    descriptor.m_OutputIntermediateScale = 0.00001f;
+
+    descriptor.m_HiddenStateScale = 0.07f;
+    descriptor.m_HiddenStateZeroPoint = 0;
+
+    const unsigned int numBatches = 2;
+    const unsigned int inputSize = 5;
+    const unsigned int outputSize = 4;
+    const unsigned int numUnits = 4;
+
+    // Scale/Offset quantization info
+    float inputScale = 0.0078f;
+    int32_t inputOffset = 0;
+
+    float outputScale = 0.0078f;
+    int32_t outputOffset = 0;
+
+    float cellStateScale = 3.5002e-05f;
+    int32_t cellStateOffset = 0;
+
+    float weightsScale = 0.007f;
+    int32_t weightsOffset = 0;
+
+    float biasScale = 3.5002e-05f / 1024;
+    int32_t biasOffset = 0;
+
+    // Weights and bias tensor and quantization info
+    armnn::TensorInfo inputWeightsInfo({numUnits, inputSize},
+                                       armnn::DataType::QSymmS8,
+                                       weightsScale,
+                                       weightsOffset);
+
+    armnn::TensorInfo recurrentWeightsInfo({numUnits, outputSize},
+                                           armnn::DataType::QSymmS8,
+                                           weightsScale,
+                                           weightsOffset);
+
+    armnn::TensorInfo biasInfo({numUnits}, armnn::DataType::Signed32, biasScale, biasOffset);
+
+    std::vector<int8_t> inputToForgetWeightsData = GenerateRandomData<int8_t>(inputWeightsInfo.GetNumElements());
+    std::vector<int8_t> inputToCellWeightsData = GenerateRandomData<int8_t>(inputWeightsInfo.GetNumElements());
+    std::vector<int8_t> inputToOutputWeightsData = GenerateRandomData<int8_t>(inputWeightsInfo.GetNumElements());
+
+    armnn::ConstTensor inputToForgetWeights(inputWeightsInfo, inputToForgetWeightsData);
+    armnn::ConstTensor inputToCellWeights(inputWeightsInfo, inputToCellWeightsData);
+    armnn::ConstTensor inputToOutputWeights(inputWeightsInfo, inputToOutputWeightsData);
+
+    std::vector<int8_t> recurrentToForgetWeightsData =
+            GenerateRandomData<int8_t>(recurrentWeightsInfo.GetNumElements());
+    std::vector<int8_t> recurrentToCellWeightsData =
+            GenerateRandomData<int8_t>(recurrentWeightsInfo.GetNumElements());
+    std::vector<int8_t> recurrentToOutputWeightsData =
+            GenerateRandomData<int8_t>(recurrentWeightsInfo.GetNumElements());
+
+    armnn::ConstTensor recurrentToForgetWeights(recurrentWeightsInfo, recurrentToForgetWeightsData);
+    armnn::ConstTensor recurrentToCellWeights(recurrentWeightsInfo, recurrentToCellWeightsData);
+    armnn::ConstTensor recurrentToOutputWeights(recurrentWeightsInfo, recurrentToOutputWeightsData);
+
+    std::vector<int32_t> forgetGateBiasData(numUnits, 1);
+    std::vector<int32_t> cellBiasData(numUnits, 0);
+    std::vector<int32_t> outputGateBiasData(numUnits, 0);
+
+    armnn::ConstTensor forgetGateBias(biasInfo, forgetGateBiasData);
+    armnn::ConstTensor cellBias(biasInfo, cellBiasData);
+    armnn::ConstTensor outputGateBias(biasInfo, outputGateBiasData);
+
+    // Set up params
+    armnn::LstmInputParams params;
+    params.m_InputToForgetWeights = &inputToForgetWeights;
+    params.m_InputToCellWeights = &inputToCellWeights;
+    params.m_InputToOutputWeights = &inputToOutputWeights;
+
+    params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
+    params.m_RecurrentToCellWeights = &recurrentToCellWeights;
+    params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
+
+    params.m_ForgetGateBias = &forgetGateBias;
+    params.m_CellBias = &cellBias;
+    params.m_OutputGateBias = &outputGateBias;
+
+    // Create network
+    armnn::INetworkPtr network = armnn::INetwork::Create();
+    const std::string layerName("qLstm");
+
+    armnn::IConnectableLayer* const input = network->AddInputLayer(0);
+    armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(1);
+    armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(2);
+
+    armnn::IConnectableLayer* const qLstmLayer = network->AddQLstmLayer(descriptor, params, layerName.c_str());
+
+    armnn::IConnectableLayer* const outputStateOut = network->AddOutputLayer(0);
+    armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(1);
+    armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(2);
+
+    // Input/Output tensor info
+    armnn::TensorInfo inputInfo({numBatches , inputSize},
+                                armnn::DataType::QAsymmS8,
+                                inputScale,
+                                inputOffset);
+
+    armnn::TensorInfo cellStateInfo({numBatches , numUnits},
+                                    armnn::DataType::QSymmS16,
+                                    cellStateScale,
+                                    cellStateOffset);
+
+    armnn::TensorInfo outputStateInfo({numBatches , outputSize},
+                                      armnn::DataType::QAsymmS8,
+                                      outputScale,
+                                      outputOffset);
+
+    // Connect input/output slots
+    // NOTE(review): slot 1 (outputStateIn) is given cellStateInfo and slot 2
+    // (cellStateIn) is given outputStateInfo, which looks swapped — but the
+    // checker below lists the infos in the same slot order, so the test is
+    // self-consistent. Worth confirming against the QLstm slot convention.
+    input->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(0));
+    input->GetOutputSlot(0).SetTensorInfo(inputInfo);
+
+    outputStateIn->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(1));
+    outputStateIn->GetOutputSlot(0).SetTensorInfo(cellStateInfo);
+
+    cellStateIn->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(2));
+    cellStateIn->GetOutputSlot(0).SetTensorInfo(outputStateInfo);
+
+    qLstmLayer->GetOutputSlot(0).Connect(outputStateOut->GetInputSlot(0));
+    qLstmLayer->GetOutputSlot(0).SetTensorInfo(outputStateInfo);
+
+    qLstmLayer->GetOutputSlot(1).Connect(cellStateOut->GetInputSlot(0));
+    qLstmLayer->GetOutputSlot(1).SetTensorInfo(cellStateInfo);
+
+    qLstmLayer->GetOutputSlot(2).Connect(outputLayer->GetInputSlot(0));
+    qLstmLayer->GetOutputSlot(2).SetTensorInfo(outputStateInfo);
+
+    // Serialize, deserialize, then verify the layer round-tripped intact.
+    armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
+    BOOST_CHECK(deserializedNetwork);
+
+    VerifyLstmLayer<armnn::QLstmDescriptor> checker(
+            layerName,
+            {inputInfo, cellStateInfo, outputStateInfo},
+            {outputStateInfo, cellStateInfo, outputStateInfo},
+            descriptor,
+            params);
+
+    deserializedNetwork->ExecuteStrategy(checker);
+}
+
+// Round-trip test for a QLstm layer with CIFG enabled and layer normalisation
+// enabled (projection and peephole disabled): the mandatory weights/biases
+// plus the forget/cell/output layer-norm weights are serialized, deserialized
+// and verified with VerifyLstmLayer.
+BOOST_AUTO_TEST_CASE(SerializeDeserializeQLstmCifgLayerNorm)
+{
+    armnn::QLstmDescriptor descriptor;
+
+    // CIFG is enabled here, so the CIFG-specific parameters (input gate
+    // weights/bias) are deliberately not set below.
+    descriptor.m_CifgEnabled = true;
+    descriptor.m_ProjectionEnabled = false;
+    descriptor.m_PeepholeEnabled = false;
+    descriptor.m_LayerNormEnabled = true;
+
+    descriptor.m_CellClip = 0.0f;
+    descriptor.m_ProjectionClip = 0.0f;
+
+    descriptor.m_InputIntermediateScale = 0.00001f;
+    descriptor.m_ForgetIntermediateScale = 0.00001f;
+    descriptor.m_CellIntermediateScale = 0.00001f;
+    descriptor.m_OutputIntermediateScale = 0.00001f;
+
+    descriptor.m_HiddenStateScale = 0.07f;
+    descriptor.m_HiddenStateZeroPoint = 0;
+
+    const unsigned int numBatches = 2;
+    const unsigned int inputSize = 5;
+    const unsigned int outputSize = 4;
+    const unsigned int numUnits = 4;
+
+    // Scale/Offset quantization info
+    float inputScale = 0.0078f;
+    int32_t inputOffset = 0;
+
+    float outputScale = 0.0078f;
+    int32_t outputOffset = 0;
+
+    float cellStateScale = 3.5002e-05f;
+    int32_t cellStateOffset = 0;
+
+    float weightsScale = 0.007f;
+    int32_t weightsOffset = 0;
+
+    float layerNormScale = 3.5002e-05f;
+    int32_t layerNormOffset = 0;
+
+    float biasScale = layerNormScale / 1024;
+    int32_t biasOffset = 0;
+
+    // Weights and bias tensor and quantization info
+    armnn::TensorInfo inputWeightsInfo({numUnits, inputSize},
+                                       armnn::DataType::QSymmS8,
+                                       weightsScale,
+                                       weightsOffset);
+
+    armnn::TensorInfo recurrentWeightsInfo({numUnits, outputSize},
+                                           armnn::DataType::QSymmS8,
+                                           weightsScale,
+                                           weightsOffset);
+
+    armnn::TensorInfo biasInfo({numUnits},
+                               armnn::DataType::Signed32,
+                               biasScale,
+                               biasOffset);
+
+    armnn::TensorInfo layerNormWeightsInfo({numUnits},
+                                           armnn::DataType::QSymmS16,
+                                           layerNormScale,
+                                           layerNormOffset);
+
+    // Mandatory params
+    std::vector<int8_t> inputToForgetWeightsData = GenerateRandomData<int8_t>(inputWeightsInfo.GetNumElements());
+    std::vector<int8_t> inputToCellWeightsData = GenerateRandomData<int8_t>(inputWeightsInfo.GetNumElements());
+    std::vector<int8_t> inputToOutputWeightsData = GenerateRandomData<int8_t>(inputWeightsInfo.GetNumElements());
+
+    armnn::ConstTensor inputToForgetWeights(inputWeightsInfo, inputToForgetWeightsData);
+    armnn::ConstTensor inputToCellWeights(inputWeightsInfo, inputToCellWeightsData);
+    armnn::ConstTensor inputToOutputWeights(inputWeightsInfo, inputToOutputWeightsData);
+
+    std::vector<int8_t> recurrentToForgetWeightsData =
+            GenerateRandomData<int8_t>(recurrentWeightsInfo.GetNumElements());
+    std::vector<int8_t> recurrentToCellWeightsData =
+            GenerateRandomData<int8_t>(recurrentWeightsInfo.GetNumElements());
+    std::vector<int8_t> recurrentToOutputWeightsData =
+            GenerateRandomData<int8_t>(recurrentWeightsInfo.GetNumElements());
+
+    armnn::ConstTensor recurrentToForgetWeights(recurrentWeightsInfo, recurrentToForgetWeightsData);
+    armnn::ConstTensor recurrentToCellWeights(recurrentWeightsInfo, recurrentToCellWeightsData);
+    armnn::ConstTensor recurrentToOutputWeights(recurrentWeightsInfo, recurrentToOutputWeightsData);
+
+    std::vector<int32_t> forgetGateBiasData(numUnits, 1);
+    std::vector<int32_t> cellBiasData(numUnits, 0);
+    std::vector<int32_t> outputGateBiasData(numUnits, 0);
+
+    armnn::ConstTensor forgetGateBias(biasInfo, forgetGateBiasData);
+    armnn::ConstTensor cellBias(biasInfo, cellBiasData);
+    armnn::ConstTensor outputGateBias(biasInfo, outputGateBiasData);
+
+    // Layer Norm
+    std::vector<int16_t> forgetLayerNormWeightsData =
+            GenerateRandomData<int16_t>(layerNormWeightsInfo.GetNumElements());
+    std::vector<int16_t> cellLayerNormWeightsData =
+            GenerateRandomData<int16_t>(layerNormWeightsInfo.GetNumElements());
+    std::vector<int16_t> outputLayerNormWeightsData =
+            GenerateRandomData<int16_t>(layerNormWeightsInfo.GetNumElements());
+
+    armnn::ConstTensor forgetLayerNormWeights(layerNormWeightsInfo, forgetLayerNormWeightsData);
+    armnn::ConstTensor cellLayerNormWeights(layerNormWeightsInfo, cellLayerNormWeightsData);
+    armnn::ConstTensor outputLayerNormWeights(layerNormWeightsInfo, outputLayerNormWeightsData);
+
+    // Set up params
+    armnn::LstmInputParams params;
+
+    // Mandatory params
+    params.m_InputToForgetWeights = &inputToForgetWeights;
+    params.m_InputToCellWeights = &inputToCellWeights;
+    params.m_InputToOutputWeights = &inputToOutputWeights;
+
+    params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
+    params.m_RecurrentToCellWeights = &recurrentToCellWeights;
+    params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
+
+    params.m_ForgetGateBias = &forgetGateBias;
+    params.m_CellBias = &cellBias;
+    params.m_OutputGateBias = &outputGateBias;
+
+    // Layer Norm
+    params.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
+    params.m_CellLayerNormWeights = &cellLayerNormWeights;
+    params.m_OutputLayerNormWeights = &outputLayerNormWeights;
+
+    // Create network
+    armnn::INetworkPtr network = armnn::INetwork::Create();
+    const std::string layerName("qLstm");
+
+    armnn::IConnectableLayer* const input = network->AddInputLayer(0);
+    armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(1);
+    armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(2);
+
+    armnn::IConnectableLayer* const qLstmLayer = network->AddQLstmLayer(descriptor, params, layerName.c_str());
+
+    armnn::IConnectableLayer* const outputStateOut = network->AddOutputLayer(0);
+    armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(1);
+    armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(2);
+
+    // Input/Output tensor info
+    armnn::TensorInfo inputInfo({numBatches , inputSize},
+                                armnn::DataType::QAsymmS8,
+                                inputScale,
+                                inputOffset);
+
+    armnn::TensorInfo cellStateInfo({numBatches , numUnits},
+                                    armnn::DataType::QSymmS16,
+                                    cellStateScale,
+                                    cellStateOffset);
+
+    armnn::TensorInfo outputStateInfo({numBatches , outputSize},
+                                      armnn::DataType::QAsymmS8,
+                                      outputScale,
+                                      outputOffset);
+
+    // Connect input/output slots
+    // NOTE(review): slot 1 (outputStateIn) receives cellStateInfo and slot 2
+    // (cellStateIn) receives outputStateInfo — same apparent swap as in
+    // SerializeDeserializeQLstmBasic; the checker matches this slot order, so
+    // the test is self-consistent.
+    input->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(0));
+    input->GetOutputSlot(0).SetTensorInfo(inputInfo);
+
+    outputStateIn->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(1));
+    outputStateIn->GetOutputSlot(0).SetTensorInfo(cellStateInfo);
+
+    cellStateIn->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(2));
+    cellStateIn->GetOutputSlot(0).SetTensorInfo(outputStateInfo);
+
+    qLstmLayer->GetOutputSlot(0).Connect(outputStateOut->GetInputSlot(0));
+    qLstmLayer->GetOutputSlot(0).SetTensorInfo(outputStateInfo);
+
+    qLstmLayer->GetOutputSlot(1).Connect(cellStateOut->GetInputSlot(0));
+    qLstmLayer->GetOutputSlot(1).SetTensorInfo(cellStateInfo);
+
+    qLstmLayer->GetOutputSlot(2).Connect(outputLayer->GetInputSlot(0));
+    qLstmLayer->GetOutputSlot(2).SetTensorInfo(outputStateInfo);
+
+    // Serialize, deserialize, then verify the layer round-tripped intact.
+    armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
+    BOOST_CHECK(deserializedNetwork);
+
+    VerifyLstmLayer<armnn::QLstmDescriptor> checker(layerName,
+                                                    {inputInfo, cellStateInfo, outputStateInfo},
+                                                    {outputStateInfo, cellStateInfo, outputStateInfo},
+                                                    descriptor,
+                                                    params);
+
+    deserializedNetwork->ExecuteStrategy(checker);
+}
+
+BOOST_AUTO_TEST_CASE(SerializeDeserializeQLstmAdvanced)
+{
+ armnn::QLstmDescriptor descriptor;
+
+ descriptor.m_CifgEnabled = false;
+ descriptor.m_ProjectionEnabled = true;
+ descriptor.m_PeepholeEnabled = true;
+ descriptor.m_LayerNormEnabled = true;
+
+ descriptor.m_CellClip = 0.1f;
+ descriptor.m_ProjectionClip = 0.1f;
+
+ descriptor.m_InputIntermediateScale = 0.00001f;
+ descriptor.m_ForgetIntermediateScale = 0.00001f;
+ descriptor.m_CellIntermediateScale = 0.00001f;
+ descriptor.m_OutputIntermediateScale = 0.00001f;
+
+ descriptor.m_HiddenStateScale = 0.07f;
+ descriptor.m_HiddenStateZeroPoint = 0;
+
+ const unsigned int numBatches = 2;
+ const unsigned int inputSize = 5;
+ const unsigned int outputSize = 4;
+ const unsigned int numUnits = 4;
+
+ // Scale/Offset quantization info
+ float inputScale = 0.0078f;
+ int32_t inputOffset = 0;
+
+ float outputScale = 0.0078f;
+ int32_t outputOffset = 0;
+
+ float cellStateScale = 3.5002e-05f;
+ int32_t cellStateOffset = 0;
+
+ float weightsScale = 0.007f;
+ int32_t weightsOffset = 0;
+
+ float layerNormScale = 3.5002e-05f;
+ int32_t layerNormOffset = 0;
+
+ float biasScale = layerNormScale / 1024;
+ int32_t biasOffset = 0;
+
+ // Weights and bias tensor and quantization info
+ armnn::TensorInfo inputWeightsInfo({numUnits, inputSize},
+ armnn::DataType::QSymmS8,
+ weightsScale,
+ weightsOffset);
+
+ armnn::TensorInfo recurrentWeightsInfo({numUnits, outputSize},
+ armnn::DataType::QSymmS8,
+ weightsScale,
+ weightsOffset);
+
+ armnn::TensorInfo biasInfo({numUnits},
+ armnn::DataType::Signed32,
+ biasScale,
+ biasOffset);
+
+ armnn::TensorInfo peepholeWeightsInfo({numUnits},
+ armnn::DataType::QSymmS16,
+ weightsScale,
+ weightsOffset);
+
+ armnn::TensorInfo layerNormWeightsInfo({numUnits},
+ armnn::DataType::QSymmS16,
+ layerNormScale,
+ layerNormOffset);
+
+ armnn::TensorInfo projectionWeightsInfo({outputSize, numUnits},
+ armnn::DataType::QSymmS8,
+ weightsScale,
+ weightsOffset);
+
+ // Mandatory params
+ std::vector<int8_t> inputToForgetWeightsData = GenerateRandomData<int8_t>(inputWeightsInfo.GetNumElements());
+ std::vector<int8_t> inputToCellWeightsData = GenerateRandomData<int8_t>(inputWeightsInfo.GetNumElements());
+ std::vector<int8_t> inputToOutputWeightsData = GenerateRandomData<int8_t>(inputWeightsInfo.GetNumElements());
+
+ armnn::ConstTensor inputToForgetWeights(inputWeightsInfo, inputToForgetWeightsData);
+ armnn::ConstTensor inputToCellWeights(inputWeightsInfo, inputToCellWeightsData);
+ armnn::ConstTensor inputToOutputWeights(inputWeightsInfo, inputToOutputWeightsData);
+
+ std::vector<int8_t> recurrentToForgetWeightsData =
+ GenerateRandomData<int8_t>(recurrentWeightsInfo.GetNumElements());
+ std::vector<int8_t> recurrentToCellWeightsData =
+ GenerateRandomData<int8_t>(recurrentWeightsInfo.GetNumElements());
+ std::vector<int8_t> recurrentToOutputWeightsData =
+ GenerateRandomData<int8_t>(recurrentWeightsInfo.GetNumElements());
+
+ armnn::ConstTensor recurrentToForgetWeights(recurrentWeightsInfo, recurrentToForgetWeightsData);
+ armnn::ConstTensor recurrentToCellWeights(recurrentWeightsInfo, recurrentToCellWeightsData);
+ armnn::ConstTensor recurrentToOutputWeights(recurrentWeightsInfo, recurrentToOutputWeightsData);
+
+ std::vector<int32_t> forgetGateBiasData(numUnits, 1);
+ std::vector<int32_t> cellBiasData(numUnits, 0);
+ std::vector<int32_t> outputGateBiasData(numUnits, 0);
+
+ armnn::ConstTensor forgetGateBias(biasInfo, forgetGateBiasData);
+ armnn::ConstTensor cellBias(biasInfo, cellBiasData);
+ armnn::ConstTensor outputGateBias(biasInfo, outputGateBiasData);
+
+ // CIFG
+ std::vector<int8_t> inputToInputWeightsData = GenerateRandomData<int8_t>(inputWeightsInfo.GetNumElements());
+ std::vector<int8_t> recurrentToInputWeightsData =
+ GenerateRandomData<int8_t>(recurrentWeightsInfo.GetNumElements());
+ std::vector<int32_t> inputGateBiasData(numUnits, 1);
+
+ armnn::ConstTensor inputToInputWeights(inputWeightsInfo, inputToInputWeightsData);
+ armnn::ConstTensor recurrentToInputWeights(recurrentWeightsInfo, recurrentToInputWeightsData);
+ armnn::ConstTensor inputGateBias(biasInfo, inputGateBiasData);
+
+ // Peephole
+ std::vector<int16_t> cellToInputWeightsData = GenerateRandomData<int16_t>(peepholeWeightsInfo.GetNumElements());
+ std::vector<int16_t> cellToForgetWeightsData = GenerateRandomData<int16_t>(peepholeWeightsInfo.GetNumElements());
+ std::vector<int16_t> cellToOutputWeightsData = GenerateRandomData<int16_t>(peepholeWeightsInfo.GetNumElements());
+
+ armnn::ConstTensor cellToInputWeights(peepholeWeightsInfo, cellToInputWeightsData);
+ armnn::ConstTensor cellToForgetWeights(peepholeWeightsInfo, cellToForgetWeightsData);
+ armnn::ConstTensor cellToOutputWeights(peepholeWeightsInfo, cellToOutputWeightsData);
+
+ // Projection
+ std::vector<int8_t> projectionWeightsData = GenerateRandomData<int8_t>(projectionWeightsInfo.GetNumElements());
+ std::vector<int32_t> projectionBiasData(outputSize, 1);
+
+ armnn::ConstTensor projectionWeights(projectionWeightsInfo, projectionWeightsData);
+ armnn::ConstTensor projectionBias(armnn::TensorInfo({outputSize}, armnn::DataType::Signed32, biasScale, biasOffset), projectionBiasData);
+
+ // Layer Norm
+ std::vector<int16_t> inputLayerNormWeightsData =
+ GenerateRandomData<int16_t>(layerNormWeightsInfo.GetNumElements());
+ std::vector<int16_t> forgetLayerNormWeightsData =
+ GenerateRandomData<int16_t>(layerNormWeightsInfo.GetNumElements());
+ std::vector<int16_t> cellLayerNormWeightsData =
+ GenerateRandomData<int16_t>(layerNormWeightsInfo.GetNumElements());
+ std::vector<int16_t> outputLayerNormWeightsData =
+ GenerateRandomData<int16_t>(layerNormWeightsInfo.GetNumElements());
+
+ armnn::ConstTensor inputLayerNormWeights(layerNormWeightsInfo, inputLayerNormWeightsData);
+ armnn::ConstTensor forgetLayerNormWeights(layerNormWeightsInfo, forgetLayerNormWeightsData);
+ armnn::ConstTensor cellLayerNormWeights(layerNormWeightsInfo, cellLayerNormWeightsData);
+ armnn::ConstTensor outputLayerNormWeights(layerNormWeightsInfo, outputLayerNormWeightsData);
+
+ // Set up params
+ armnn::LstmInputParams params;
+
+ // Mandatory params
+ params.m_InputToForgetWeights = &inputToForgetWeights;
+ params.m_InputToCellWeights = &inputToCellWeights;
+ params.m_InputToOutputWeights = &inputToOutputWeights;
+
+ params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
+ params.m_RecurrentToCellWeights = &recurrentToCellWeights;
+ params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
+
+ params.m_ForgetGateBias = &forgetGateBias;
+ params.m_CellBias = &cellBias;
+ params.m_OutputGateBias = &outputGateBias;
+
+ // CIFG
+ params.m_InputToInputWeights = &inputToInputWeights;
+ params.m_RecurrentToInputWeights = &recurrentToInputWeights;
+ params.m_InputGateBias = &inputGateBias;
+
+ // Peephole
+ params.m_CellToInputWeights = &cellToInputWeights;
+ params.m_CellToForgetWeights = &cellToForgetWeights;
+ params.m_CellToOutputWeights = &cellToOutputWeights;
+
+ // Projection
+ params.m_ProjectionWeights = &projectionWeights;
+ params.m_ProjectionBias = &projectionBias;
+
+ // Layer Norm
+ params.m_InputLayerNormWeights = &inputLayerNormWeights;
+ params.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
+ params.m_CellLayerNormWeights = &cellLayerNormWeights;
+ params.m_OutputLayerNormWeights = &outputLayerNormWeights;
+
+ // Create network
+ armnn::INetworkPtr network = armnn::INetwork::Create();
+ const std::string layerName("qLstm");
+
+ armnn::IConnectableLayer* const input = network->AddInputLayer(0);
+ armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(1);
+ armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(2);
+
+ armnn::IConnectableLayer* const qLstmLayer = network->AddQLstmLayer(descriptor, params, layerName.c_str());
+
+ armnn::IConnectableLayer* const outputStateOut = network->AddOutputLayer(0);
+ armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(1);
+ armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(2);
+
+ // Input/Output tensor info
+ armnn::TensorInfo inputInfo({numBatches , inputSize},
+ armnn::DataType::QAsymmS8,
+ inputScale,
+ inputOffset);
+
+ armnn::TensorInfo cellStateInfo({numBatches , numUnits},
+ armnn::DataType::QSymmS16,
+ cellStateScale,
+ cellStateOffset);
+
+ armnn::TensorInfo outputStateInfo({numBatches , outputSize},
+ armnn::DataType::QAsymmS8,
+ outputScale,
+ outputOffset);
+
+ // Connect input/output slots
+ input->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(0));
+ input->GetOutputSlot(0).SetTensorInfo(inputInfo);
+
+ outputStateIn->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(1));
+ outputStateIn->GetOutputSlot(0).SetTensorInfo(outputStateInfo);
+
+ cellStateIn->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(2));
+ cellStateIn->GetOutputSlot(0).SetTensorInfo(cellStateInfo);
+
+ qLstmLayer->GetOutputSlot(0).Connect(outputStateOut->GetInputSlot(0));
+ qLstmLayer->GetOutputSlot(0).SetTensorInfo(outputStateInfo);
+
+ qLstmLayer->GetOutputSlot(1).Connect(cellStateOut->GetInputSlot(0));
+ qLstmLayer->GetOutputSlot(1).SetTensorInfo(cellStateInfo);
+
+ qLstmLayer->GetOutputSlot(2).Connect(outputLayer->GetInputSlot(0));
+ qLstmLayer->GetOutputSlot(2).SetTensorInfo(outputStateInfo);
+
+ armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
+ BOOST_CHECK(deserializedNetwork);
+
+ VerifyLstmLayer<armnn::QLstmDescriptor> checker(layerName,
+ {inputInfo, outputStateInfo, cellStateInfo},
+ {outputStateInfo, cellStateInfo, outputStateInfo},
+ descriptor,
+ params);
+
+ deserializedNetwork->ExecuteStrategy(checker);
+}
+
+BOOST_AUTO_TEST_SUITE_END()
diff --git a/src/armnnSerializer/test/SerializerTestUtils.cpp b/src/armnnSerializer/test/SerializerTestUtils.cpp
new file mode 100644
index 0000000000..586d2a05a5
--- /dev/null
+++ b/src/armnnSerializer/test/SerializerTestUtils.cpp
@@ -0,0 +1,163 @@
+//
+// Copyright © 2021 Arm Ltd and Contributors. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+
+#include "SerializerTestUtils.hpp"
+#include "../Serializer.hpp"
+
+using armnnDeserializer::IDeserializer;
+
+LayerVerifierBase::LayerVerifierBase(const std::string& layerName,
+ const std::vector<armnn::TensorInfo>& inputInfos,
+ const std::vector<armnn::TensorInfo>& outputInfos)
+ : m_LayerName(layerName)
+ , m_InputTensorInfos(inputInfos)
+ , m_OutputTensorInfos(outputInfos)
+{}
+
+void LayerVerifierBase::ExecuteStrategy(const armnn::IConnectableLayer* layer,
+ const armnn::BaseDescriptor& descriptor,
+ const std::vector<armnn::ConstTensor>& constants,
+ const char* name,
+ const armnn::LayerBindingId id)
+{
+ armnn::IgnoreUnused(descriptor, constants, id);
+ switch (layer->GetType())
+ {
+ case armnn::LayerType::Input: break;
+ case armnn::LayerType::Output: break;
+ default:
+ {
+ VerifyNameAndConnections(layer, name);
+ }
+ }
+}
+
+
+void LayerVerifierBase::VerifyNameAndConnections(const armnn::IConnectableLayer* layer, const char* name)
+{
+ BOOST_TEST(name == m_LayerName.c_str());
+
+ BOOST_TEST(layer->GetNumInputSlots() == m_InputTensorInfos.size());
+ BOOST_TEST(layer->GetNumOutputSlots() == m_OutputTensorInfos.size());
+
+ for (unsigned int i = 0; i < m_InputTensorInfos.size(); i++)
+ {
+ const armnn::IOutputSlot* connectedOutput = layer->GetInputSlot(i).GetConnection();
+ BOOST_CHECK(connectedOutput);
+
+ const armnn::TensorInfo& connectedInfo = connectedOutput->GetTensorInfo();
+ BOOST_TEST(connectedInfo.GetShape() == m_InputTensorInfos[i].GetShape());
+ BOOST_TEST(
+ GetDataTypeName(connectedInfo.GetDataType()) == GetDataTypeName(m_InputTensorInfos[i].GetDataType()));
+
+ BOOST_TEST(connectedInfo.GetQuantizationScale() == m_InputTensorInfos[i].GetQuantizationScale());
+ BOOST_TEST(connectedInfo.GetQuantizationOffset() == m_InputTensorInfos[i].GetQuantizationOffset());
+ }
+
+ for (unsigned int i = 0; i < m_OutputTensorInfos.size(); i++)
+ {
+ const armnn::TensorInfo& outputInfo = layer->GetOutputSlot(i).GetTensorInfo();
+ BOOST_TEST(outputInfo.GetShape() == m_OutputTensorInfos[i].GetShape());
+ BOOST_TEST(
+ GetDataTypeName(outputInfo.GetDataType()) == GetDataTypeName(m_OutputTensorInfos[i].GetDataType()));
+
+ BOOST_TEST(outputInfo.GetQuantizationScale() == m_OutputTensorInfos[i].GetQuantizationScale());
+ BOOST_TEST(outputInfo.GetQuantizationOffset() == m_OutputTensorInfos[i].GetQuantizationOffset());
+ }
+}
+
+void LayerVerifierBase::VerifyConstTensors(const std::string& tensorName,
+ const armnn::ConstTensor* expectedPtr,
+ const armnn::ConstTensor* actualPtr)
+{
+ if (expectedPtr == nullptr)
+ {
+ BOOST_CHECK_MESSAGE(actualPtr == nullptr, tensorName + " should not exist");
+ }
+ else
+ {
+ BOOST_CHECK_MESSAGE(actualPtr != nullptr, tensorName + " should have been set");
+ if (actualPtr != nullptr)
+ {
+ const armnn::TensorInfo& expectedInfo = expectedPtr->GetInfo();
+ const armnn::TensorInfo& actualInfo = actualPtr->GetInfo();
+
+ BOOST_CHECK_MESSAGE(expectedInfo.GetShape() == actualInfo.GetShape(),
+ tensorName + " shapes don't match");
+ BOOST_CHECK_MESSAGE(
+ GetDataTypeName(expectedInfo.GetDataType()) == GetDataTypeName(actualInfo.GetDataType()),
+ tensorName + " data types don't match");
+
+ BOOST_CHECK_MESSAGE(expectedPtr->GetNumBytes() == actualPtr->GetNumBytes(),
+ tensorName + " (GetNumBytes) data sizes do not match");
+ if (expectedPtr->GetNumBytes() == actualPtr->GetNumBytes())
+ {
+ //check the data is identical
+ const char* expectedData = static_cast<const char*>(expectedPtr->GetMemoryArea());
+ const char* actualData = static_cast<const char*>(actualPtr->GetMemoryArea());
+ bool same = true;
+ for (unsigned int i = 0; i < expectedPtr->GetNumBytes(); ++i)
+ {
+ same = expectedData[i] == actualData[i];
+ if (!same)
+ {
+ break;
+ }
+ }
+ BOOST_CHECK_MESSAGE(same, tensorName + " data does not match");
+ }
+ }
+ }
+}
+
+void CompareConstTensor(const armnn::ConstTensor& tensor1, const armnn::ConstTensor& tensor2)
+{
+ BOOST_TEST(tensor1.GetShape() == tensor2.GetShape());
+ BOOST_TEST(GetDataTypeName(tensor1.GetDataType()) == GetDataTypeName(tensor2.GetDataType()));
+
+ switch (tensor1.GetDataType())
+ {
+ case armnn::DataType::Float32:
+ CompareConstTensorData<const float*>(
+ tensor1.GetMemoryArea(), tensor2.GetMemoryArea(), tensor1.GetNumElements());
+ break;
+ case armnn::DataType::QAsymmU8:
+ case armnn::DataType::Boolean:
+ CompareConstTensorData<const uint8_t*>(
+ tensor1.GetMemoryArea(), tensor2.GetMemoryArea(), tensor1.GetNumElements());
+ break;
+ case armnn::DataType::QSymmS8:
+ CompareConstTensorData<const int8_t*>(
+ tensor1.GetMemoryArea(), tensor2.GetMemoryArea(), tensor1.GetNumElements());
+ break;
+ case armnn::DataType::Signed32:
+ CompareConstTensorData<const int32_t*>(
+ tensor1.GetMemoryArea(), tensor2.GetMemoryArea(), tensor1.GetNumElements());
+ break;
+ default:
+ // Note that Float16 is not yet implemented
+ BOOST_TEST_MESSAGE("Unexpected datatype");
+ BOOST_TEST(false);
+ }
+}
+
+armnn::INetworkPtr DeserializeNetwork(const std::string& serializerString)
+{
+ std::vector<std::uint8_t> const serializerVector{serializerString.begin(), serializerString.end()};
+ return IDeserializer::Create()->CreateNetworkFromBinary(serializerVector);
+}
+
+std::string SerializeNetwork(const armnn::INetwork& network)
+{
+ armnnSerializer::ISerializerPtr serializer = armnnSerializer::ISerializer::Create();
+
+ serializer->Serialize(network);
+
+ std::stringstream stream;
+ serializer->SaveSerializedToStream(stream);
+
+ std::string serializerString{stream.str()};
+ return serializerString;
+}
diff --git a/src/armnnSerializer/test/SerializerTestUtils.hpp b/src/armnnSerializer/test/SerializerTestUtils.hpp
new file mode 100644
index 0000000000..e085d2ef15
--- /dev/null
+++ b/src/armnnSerializer/test/SerializerTestUtils.hpp
@@ -0,0 +1,167 @@
+//
+// Copyright © 2021 Arm Ltd and Contributors. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+#pragma once
+#include <armnn/Descriptors.hpp>
+#include <armnn/INetwork.hpp>
+#include <armnn/TypesUtils.hpp>
+#include <armnnDeserializer/IDeserializer.hpp>
+#include <armnn/utility/IgnoreUnused.hpp>
+
+#include <random>
+#include <vector>
+
+#include <boost/test/unit_test.hpp>
+
+
+armnn::INetworkPtr DeserializeNetwork(const std::string& serializerString);
+
+std::string SerializeNetwork(const armnn::INetwork& network);
+
+void CompareConstTensor(const armnn::ConstTensor& tensor1, const armnn::ConstTensor& tensor2);
+
+class LayerVerifierBase : public armnn::IStrategy
+{
+public:
+ LayerVerifierBase(const std::string& layerName,
+ const std::vector<armnn::TensorInfo>& inputInfos,
+ const std::vector<armnn::TensorInfo>& outputInfos);
+
+ void ExecuteStrategy(const armnn::IConnectableLayer* layer,
+ const armnn::BaseDescriptor& descriptor,
+ const std::vector<armnn::ConstTensor>& constants,
+ const char* name,
+ const armnn::LayerBindingId id = 0) override;
+
+protected:
+ void VerifyNameAndConnections(const armnn::IConnectableLayer* layer, const char* name);
+
+ void VerifyConstTensors(const std::string& tensorName,
+ const armnn::ConstTensor* expectedPtr,
+ const armnn::ConstTensor* actualPtr);
+
+private:
+ std::string m_LayerName;
+ std::vector<armnn::TensorInfo> m_InputTensorInfos;
+ std::vector<armnn::TensorInfo> m_OutputTensorInfos;
+};
+
+template<typename Descriptor>
+class LayerVerifierBaseWithDescriptor : public LayerVerifierBase
+{
+public:
+ LayerVerifierBaseWithDescriptor(const std::string& layerName,
+ const std::vector<armnn::TensorInfo>& inputInfos,
+ const std::vector<armnn::TensorInfo>& outputInfos,
+ const Descriptor& descriptor)
+ : LayerVerifierBase(layerName, inputInfos, outputInfos)
+ , m_Descriptor(descriptor) {}
+
+ void ExecuteStrategy(const armnn::IConnectableLayer* layer,
+ const armnn::BaseDescriptor& descriptor,
+ const std::vector<armnn::ConstTensor>& constants,
+ const char* name,
+ const armnn::LayerBindingId id = 0) override
+ {
+ armnn::IgnoreUnused(constants, id);
+ switch (layer->GetType())
+ {
+ case armnn::LayerType::Input: break;
+ case armnn::LayerType::Output: break;
+ default:
+ {
+ VerifyNameAndConnections(layer, name);
+ const Descriptor& internalDescriptor = static_cast<const Descriptor&>(descriptor);
+ VerifyDescriptor(internalDescriptor);
+ break;
+ }
+ }
+ }
+
+protected:
+ void VerifyDescriptor(const Descriptor& descriptor)
+ {
+ BOOST_CHECK(descriptor == m_Descriptor);
+ }
+
+ Descriptor m_Descriptor;
+};
+
+template<typename T>
+void CompareConstTensorData(const void* data1, const void* data2, unsigned int numElements)
+{
+ T typedData1 = static_cast<T>(data1);
+ T typedData2 = static_cast<T>(data2);
+ BOOST_CHECK(typedData1);
+ BOOST_CHECK(typedData2);
+
+ for (unsigned int i = 0; i < numElements; i++)
+ {
+ BOOST_TEST(typedData1[i] == typedData2[i]);
+ }
+}
+
+
+template <typename Descriptor>
+class LayerVerifierBaseWithDescriptorAndConstants : public LayerVerifierBaseWithDescriptor<Descriptor>
+{
+public:
+ LayerVerifierBaseWithDescriptorAndConstants(const std::string& layerName,
+ const std::vector<armnn::TensorInfo>& inputInfos,
+ const std::vector<armnn::TensorInfo>& outputInfos,
+ const Descriptor& descriptor,
+ const std::vector<armnn::ConstTensor>& constants)
+ : LayerVerifierBaseWithDescriptor<Descriptor>(layerName, inputInfos, outputInfos, descriptor)
+ , m_Constants(constants) {}
+
+ void ExecuteStrategy(const armnn::IConnectableLayer* layer,
+ const armnn::BaseDescriptor& descriptor,
+ const std::vector<armnn::ConstTensor>& constants,
+ const char* name,
+ const armnn::LayerBindingId id = 0) override
+ {
+ armnn::IgnoreUnused(id);
+
+ switch (layer->GetType())
+ {
+ case armnn::LayerType::Input: break;
+ case armnn::LayerType::Output: break;
+ default:
+ {
+ this->VerifyNameAndConnections(layer, name);
+ const Descriptor& internalDescriptor = static_cast<const Descriptor&>(descriptor);
+ this->VerifyDescriptor(internalDescriptor);
+
+ for(std::size_t i = 0; i < constants.size(); i++)
+ {
+ CompareConstTensor(constants[i], m_Constants[i]);
+ }
+ }
+ }
+ }
+
+private:
+ std::vector<armnn::ConstTensor> m_Constants;
+};
+
+template<typename DataType>
+static std::vector<DataType> GenerateRandomData(size_t size)
+{
+ constexpr bool isIntegerType = std::is_integral<DataType>::value;
+ using Distribution =
+ typename std::conditional<isIntegerType,
+ std::uniform_int_distribution<int64_t>,
+ std::uniform_real_distribution<DataType>>::type;
+
+ static constexpr DataType lowerLimit = std::numeric_limits<DataType>::min();
+ static constexpr DataType upperLimit = std::numeric_limits<DataType>::max();
+
+ static Distribution distribution(lowerLimit, upperLimit);
+ static std::default_random_engine generator;
+
+ std::vector<DataType> randomData(size);
+ std::generate(randomData.begin(), randomData.end(), []() { return static_cast<DataType>(distribution(generator)); });
+
+ return randomData;
+} \ No newline at end of file
diff --git a/src/armnnSerializer/test/SerializerTests.cpp b/src/armnnSerializer/test/SerializerTests.cpp
index 44e8a3898e..f261731a75 100644
--- a/src/armnnSerializer/test/SerializerTests.cpp
+++ b/src/armnnSerializer/test/SerializerTests.cpp
@@ -4,6 +4,7 @@
//
#include "../Serializer.hpp"
+#include "SerializerTestUtils.hpp"
#include <armnn/Descriptors.hpp>
#include <armnn/INetwork.hpp>
@@ -11,6 +12,7 @@
#include <armnn/LstmParams.hpp>
#include <armnn/QuantizedLstmParams.hpp>
#include <armnnDeserializer/IDeserializer.hpp>
+#include <armnn/utility/IgnoreUnused.hpp>
#include <random>
#include <vector>
@@ -19,264 +21,36 @@
using armnnDeserializer::IDeserializer;
-namespace
-{
-
-#define DECLARE_LAYER_VERIFIER_CLASS(name) \
-class name##LayerVerifier : public LayerVerifierBase \
-{ \
-public: \
- name##LayerVerifier(const std::string& layerName, \
- const std::vector<armnn::TensorInfo>& inputInfos, \
- const std::vector<armnn::TensorInfo>& outputInfos) \
- : LayerVerifierBase(layerName, inputInfos, outputInfos) {} \
-\
- void Visit##name##Layer(const armnn::IConnectableLayer* layer, const char* name) override \
- { \
- VerifyNameAndConnections(layer, name); \
- } \
-};
-
-#define DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(name) \
-class name##LayerVerifier : public LayerVerifierBaseWithDescriptor<armnn::name##Descriptor> \
-{ \
-public: \
- name##LayerVerifier(const std::string& layerName, \
- const std::vector<armnn::TensorInfo>& inputInfos, \
- const std::vector<armnn::TensorInfo>& outputInfos, \
- const armnn::name##Descriptor& descriptor) \
- : LayerVerifierBaseWithDescriptor<armnn::name##Descriptor>( \
- layerName, inputInfos, outputInfos, descriptor) {} \
-\
- void Visit##name##Layer(const armnn::IConnectableLayer* layer, \
- const armnn::name##Descriptor& descriptor, \
- const char* name) override \
- { \
- VerifyNameAndConnections(layer, name); \
- VerifyDescriptor(descriptor); \
- } \
-};
-
-struct DefaultLayerVerifierPolicy
-{
- static void Apply(const std::string)
- {
- BOOST_TEST_MESSAGE("Unexpected layer found in network");
- BOOST_TEST(false);
- }
-};
-
-class LayerVerifierBase : public armnn::LayerVisitorBase<DefaultLayerVerifierPolicy>
-{
-public:
- LayerVerifierBase(const std::string& layerName,
- const std::vector<armnn::TensorInfo>& inputInfos,
- const std::vector<armnn::TensorInfo>& outputInfos)
- : m_LayerName(layerName)
- , m_InputTensorInfos(inputInfos)
- , m_OutputTensorInfos(outputInfos) {}
-
- void VisitInputLayer(const armnn::IConnectableLayer*, armnn::LayerBindingId, const char*) override {}
-
- void VisitOutputLayer(const armnn::IConnectableLayer*, armnn::LayerBindingId, const char*) override {}
-
-protected:
- void VerifyNameAndConnections(const armnn::IConnectableLayer* layer, const char* name)
- {
- BOOST_TEST(name == m_LayerName.c_str());
-
- BOOST_TEST(layer->GetNumInputSlots() == m_InputTensorInfos.size());
- BOOST_TEST(layer->GetNumOutputSlots() == m_OutputTensorInfos.size());
-
- for (unsigned int i = 0; i < m_InputTensorInfos.size(); i++)
- {
- const armnn::IOutputSlot* connectedOutput = layer->GetInputSlot(i).GetConnection();
- BOOST_CHECK(connectedOutput);
-
- const armnn::TensorInfo& connectedInfo = connectedOutput->GetTensorInfo();
- BOOST_TEST(connectedInfo.GetShape() == m_InputTensorInfos[i].GetShape());
- BOOST_TEST(
- GetDataTypeName(connectedInfo.GetDataType()) == GetDataTypeName(m_InputTensorInfos[i].GetDataType()));
-
- BOOST_TEST(connectedInfo.GetQuantizationScale() == m_InputTensorInfos[i].GetQuantizationScale());
- BOOST_TEST(connectedInfo.GetQuantizationOffset() == m_InputTensorInfos[i].GetQuantizationOffset());
- }
-
- for (unsigned int i = 0; i < m_OutputTensorInfos.size(); i++)
- {
- const armnn::TensorInfo& outputInfo = layer->GetOutputSlot(i).GetTensorInfo();
- BOOST_TEST(outputInfo.GetShape() == m_OutputTensorInfos[i].GetShape());
- BOOST_TEST(
- GetDataTypeName(outputInfo.GetDataType()) == GetDataTypeName(m_OutputTensorInfos[i].GetDataType()));
-
- BOOST_TEST(outputInfo.GetQuantizationScale() == m_OutputTensorInfos[i].GetQuantizationScale());
- BOOST_TEST(outputInfo.GetQuantizationOffset() == m_OutputTensorInfos[i].GetQuantizationOffset());
- }
- }
-
- void VerifyConstTensors(const std::string& tensorName,
- const armnn::ConstTensor* expectedPtr,
- const armnn::ConstTensor* actualPtr)
- {
- if (expectedPtr == nullptr)
- {
- BOOST_CHECK_MESSAGE(actualPtr == nullptr, tensorName + " should not exist");
- }
- else
- {
- BOOST_CHECK_MESSAGE(actualPtr != nullptr, tensorName + " should have been set");
- if (actualPtr != nullptr)
- {
- const armnn::TensorInfo& expectedInfo = expectedPtr->GetInfo();
- const armnn::TensorInfo& actualInfo = actualPtr->GetInfo();
-
- BOOST_CHECK_MESSAGE(expectedInfo.GetShape() == actualInfo.GetShape(),
- tensorName + " shapes don't match");
- BOOST_CHECK_MESSAGE(
- GetDataTypeName(expectedInfo.GetDataType()) == GetDataTypeName(actualInfo.GetDataType()),
- tensorName + " data types don't match");
-
- BOOST_CHECK_MESSAGE(expectedPtr->GetNumBytes() == actualPtr->GetNumBytes(),
- tensorName + " (GetNumBytes) data sizes do not match");
- if (expectedPtr->GetNumBytes() == actualPtr->GetNumBytes())
- {
- //check the data is identical
- const char* expectedData = static_cast<const char*>(expectedPtr->GetMemoryArea());
- const char* actualData = static_cast<const char*>(actualPtr->GetMemoryArea());
- bool same = true;
- for (unsigned int i = 0; i < expectedPtr->GetNumBytes(); ++i)
- {
- same = expectedData[i] == actualData[i];
- if (!same)
- {
- break;
- }
- }
- BOOST_CHECK_MESSAGE(same, tensorName + " data does not match");
- }
- }
- }
- }
-
-private:
- std::string m_LayerName;
- std::vector<armnn::TensorInfo> m_InputTensorInfos;
- std::vector<armnn::TensorInfo> m_OutputTensorInfos;
-};
-
-template<typename Descriptor>
-class LayerVerifierBaseWithDescriptor : public LayerVerifierBase
-{
-public:
- LayerVerifierBaseWithDescriptor(const std::string& layerName,
- const std::vector<armnn::TensorInfo>& inputInfos,
- const std::vector<armnn::TensorInfo>& outputInfos,
- const Descriptor& descriptor)
- : LayerVerifierBase(layerName, inputInfos, outputInfos)
- , m_Descriptor(descriptor) {}
-
-protected:
- void VerifyDescriptor(const Descriptor& descriptor)
- {
- BOOST_CHECK(descriptor == m_Descriptor);
- }
-
- Descriptor m_Descriptor;
-};
-
-template<typename T>
-void CompareConstTensorData(const void* data1, const void* data2, unsigned int numElements)
-{
- T typedData1 = static_cast<T>(data1);
- T typedData2 = static_cast<T>(data2);
- BOOST_CHECK(typedData1);
- BOOST_CHECK(typedData2);
-
- for (unsigned int i = 0; i < numElements; i++)
- {
- BOOST_TEST(typedData1[i] == typedData2[i]);
- }
-}
-
-void CompareConstTensor(const armnn::ConstTensor& tensor1, const armnn::ConstTensor& tensor2)
-{
- BOOST_TEST(tensor1.GetShape() == tensor2.GetShape());
- BOOST_TEST(GetDataTypeName(tensor1.GetDataType()) == GetDataTypeName(tensor2.GetDataType()));
-
- switch (tensor1.GetDataType())
- {
- case armnn::DataType::Float32:
- CompareConstTensorData<const float*>(
- tensor1.GetMemoryArea(), tensor2.GetMemoryArea(), tensor1.GetNumElements());
- break;
- case armnn::DataType::QAsymmU8:
- case armnn::DataType::Boolean:
- CompareConstTensorData<const uint8_t*>(
- tensor1.GetMemoryArea(), tensor2.GetMemoryArea(), tensor1.GetNumElements());
- break;
- case armnn::DataType::QSymmS8:
- CompareConstTensorData<const int8_t*>(
- tensor1.GetMemoryArea(), tensor2.GetMemoryArea(), tensor1.GetNumElements());
- break;
- case armnn::DataType::Signed32:
- CompareConstTensorData<const int32_t*>(
- tensor1.GetMemoryArea(), tensor2.GetMemoryArea(), tensor1.GetNumElements());
- break;
- default:
- // Note that Float16 is not yet implemented
- BOOST_TEST_MESSAGE("Unexpected datatype");
- BOOST_TEST(false);
- }
-}
-
-armnn::INetworkPtr DeserializeNetwork(const std::string& serializerString)
-{
- std::vector<std::uint8_t> const serializerVector{serializerString.begin(), serializerString.end()};
- return IDeserializer::Create()->CreateNetworkFromBinary(serializerVector);
-}
+BOOST_AUTO_TEST_SUITE(SerializerTests)
-std::string SerializeNetwork(const armnn::INetwork& network)
+BOOST_AUTO_TEST_CASE(SerializeAbs)
{
- armnnSerializer::ISerializerPtr serializer = armnnSerializer::ISerializer::Create();
-
- serializer->Serialize(network);
-
- std::stringstream stream;
- serializer->SaveSerializedToStream(stream);
+ const std::string layerName("abs");
+ const armnn::TensorInfo tensorInfo({1, 2, 3}, armnn::DataType::Float32);
- std::string serializerString{stream.str()};
- return serializerString;
-}
+ armnn::INetworkPtr network = armnn::INetwork::Create();
+ armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
-template<typename DataType>
-static std::vector<DataType> GenerateRandomData(size_t size)
-{
- constexpr bool isIntegerType = std::is_integral<DataType>::value;
- using Distribution =
- typename std::conditional<isIntegerType,
- std::uniform_int_distribution<DataType>,
- std::uniform_real_distribution<DataType>>::type;
+ ARMNN_NO_DEPRECATE_WARN_BEGIN
+ armnn::IConnectableLayer* const absLayer = network->AddAbsLayer(layerName.c_str());
+ ARMNN_NO_DEPRECATE_WARN_END
+ armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
- static constexpr DataType lowerLimit = std::numeric_limits<DataType>::min();
- static constexpr DataType upperLimit = std::numeric_limits<DataType>::max();
+ inputLayer->GetOutputSlot(0).Connect(absLayer->GetInputSlot(0));
+ absLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
- static Distribution distribution(lowerLimit, upperLimit);
- static std::default_random_engine generator;
+ inputLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
+ absLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
- std::vector<DataType> randomData(size);
- std::generate(randomData.begin(), randomData.end(), []() { return distribution(generator); });
+ armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
+ BOOST_CHECK(deserializedNetwork);
- return randomData;
+ LayerVerifierBase verifier(layerName, {tensorInfo}, {tensorInfo});
+ deserializedNetwork->ExecuteStrategy(verifier);
}
-} // anonymous namespace
-
-BOOST_AUTO_TEST_SUITE(SerializerTests)
-
BOOST_AUTO_TEST_CASE(SerializeAddition)
{
- DECLARE_LAYER_VERIFIER_CLASS(Addition)
-
const std::string layerName("addition");
const armnn::TensorInfo tensorInfo({1, 2, 3}, armnn::DataType::Float32);
@@ -294,17 +68,16 @@ BOOST_AUTO_TEST_CASE(SerializeAddition)
inputLayer1->GetOutputSlot(0).SetTensorInfo(tensorInfo);
additionLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
- armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
+ std::string serializedNetwork = SerializeNetwork(*network);
+ armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(serializedNetwork);
BOOST_CHECK(deserializedNetwork);
- AdditionLayerVerifier verifier(layerName, {tensorInfo, tensorInfo}, {tensorInfo});
- deserializedNetwork->Accept(verifier);
+ LayerVerifierBase verifier(layerName, {tensorInfo, tensorInfo}, {tensorInfo});
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeArgMinMax)
{
- DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(ArgMinMax)
-
const std::string layerName("argminmax");
const armnn::TensorInfo inputInfo({1, 2, 3}, armnn::DataType::Float32);
const armnn::TensorInfo outputInfo({1, 3}, armnn::DataType::Signed32);
@@ -327,54 +100,15 @@ BOOST_AUTO_TEST_CASE(SerializeArgMinMax)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- ArgMinMaxLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, descriptor);
- deserializedNetwork->Accept(verifier);
+ LayerVerifierBaseWithDescriptor<armnn::ArgMinMaxDescriptor> verifier(layerName,
+ {inputInfo},
+ {outputInfo},
+ descriptor);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeBatchNormalization)
{
- using Descriptor = armnn::BatchNormalizationDescriptor;
- class BatchNormalizationLayerVerifier : public LayerVerifierBaseWithDescriptor<Descriptor>
- {
- public:
- BatchNormalizationLayerVerifier(const std::string& layerName,
- const std::vector<armnn::TensorInfo>& inputInfos,
- const std::vector<armnn::TensorInfo>& outputInfos,
- const Descriptor& descriptor,
- const armnn::ConstTensor& mean,
- const armnn::ConstTensor& variance,
- const armnn::ConstTensor& beta,
- const armnn::ConstTensor& gamma)
- : LayerVerifierBaseWithDescriptor<Descriptor>(layerName, inputInfos, outputInfos, descriptor)
- , m_Mean(mean)
- , m_Variance(variance)
- , m_Beta(beta)
- , m_Gamma(gamma) {}
-
- void VisitBatchNormalizationLayer(const armnn::IConnectableLayer* layer,
- const Descriptor& descriptor,
- const armnn::ConstTensor& mean,
- const armnn::ConstTensor& variance,
- const armnn::ConstTensor& beta,
- const armnn::ConstTensor& gamma,
- const char* name) override
- {
- VerifyNameAndConnections(layer, name);
- VerifyDescriptor(descriptor);
-
- CompareConstTensor(mean, m_Mean);
- CompareConstTensor(variance, m_Variance);
- CompareConstTensor(beta, m_Beta);
- CompareConstTensor(gamma, m_Gamma);
- }
-
- private:
- armnn::ConstTensor m_Mean;
- armnn::ConstTensor m_Variance;
- armnn::ConstTensor m_Beta;
- armnn::ConstTensor m_Gamma;
- };
-
const std::string layerName("batchNormalization");
const armnn::TensorInfo inputInfo ({ 1, 3, 3, 1 }, armnn::DataType::Float32);
const armnn::TensorInfo outputInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32);
@@ -393,15 +127,21 @@ BOOST_AUTO_TEST_CASE(SerializeBatchNormalization)
std::vector<float> betaData({1.0});
std::vector<float> gammaData({0.0});
- armnn::ConstTensor mean(meanInfo, meanData);
- armnn::ConstTensor variance(varianceInfo, varianceData);
- armnn::ConstTensor beta(betaInfo, betaData);
- armnn::ConstTensor gamma(gammaInfo, gammaData);
+ std::vector<armnn::ConstTensor> constants;
+ constants.emplace_back(armnn::ConstTensor(meanInfo, meanData));
+ constants.emplace_back(armnn::ConstTensor(varianceInfo, varianceData));
+ constants.emplace_back(armnn::ConstTensor(betaInfo, betaData));
+ constants.emplace_back(armnn::ConstTensor(gammaInfo, gammaData));
armnn::INetworkPtr network = armnn::INetwork::Create();
armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
armnn::IConnectableLayer* const batchNormalizationLayer =
- network->AddBatchNormalizationLayer(descriptor, mean, variance, beta, gamma, layerName.c_str());
+ network->AddBatchNormalizationLayer(descriptor,
+ constants[0],
+ constants[1],
+ constants[2],
+ constants[3],
+ layerName.c_str());
armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
inputLayer->GetOutputSlot(0).Connect(batchNormalizationLayer->GetInputSlot(0));
@@ -413,15 +153,13 @@ BOOST_AUTO_TEST_CASE(SerializeBatchNormalization)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- BatchNormalizationLayerVerifier verifier(
- layerName, {inputInfo}, {outputInfo}, descriptor, mean, variance, beta, gamma);
- deserializedNetwork->Accept(verifier);
+ LayerVerifierBaseWithDescriptorAndConstants<armnn::BatchNormalizationDescriptor> verifier(
+ layerName, {inputInfo}, {outputInfo}, descriptor, constants);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeBatchToSpaceNd)
{
- DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(BatchToSpaceNd)
-
const std::string layerName("spaceToBatchNd");
const armnn::TensorInfo inputInfo({4, 1, 2, 2}, armnn::DataType::Float32);
const armnn::TensorInfo outputInfo({1, 1, 4, 4}, armnn::DataType::Float32);
@@ -445,14 +183,15 @@ BOOST_AUTO_TEST_CASE(SerializeBatchToSpaceNd)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- BatchToSpaceNdLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, desc);
- deserializedNetwork->Accept(verifier);
+ LayerVerifierBaseWithDescriptor<armnn::BatchToSpaceNdDescriptor> verifier(layerName,
+ {inputInfo},
+ {outputInfo},
+ desc);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeComparison)
{
- DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Comparison)
-
const std::string layerName("comparison");
const armnn::TensorShape shape{2, 1, 2, 4};
@@ -479,8 +218,11 @@ BOOST_AUTO_TEST_CASE(SerializeComparison)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- ComparisonLayerVerifier verifier(layerName, { inputInfo, inputInfo }, { outputInfo }, descriptor);
- deserializedNetwork->Accept(verifier);
+ LayerVerifierBaseWithDescriptor<armnn::ComparisonDescriptor> verifier(layerName,
+ { inputInfo, inputInfo },
+ { outputInfo },
+ descriptor);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeConstant)
@@ -491,22 +233,37 @@ BOOST_AUTO_TEST_CASE(SerializeConstant)
ConstantLayerVerifier(const std::string& layerName,
const std::vector<armnn::TensorInfo>& inputInfos,
const std::vector<armnn::TensorInfo>& outputInfos,
- const armnn::ConstTensor& layerInput)
+ const std::vector<armnn::ConstTensor>& constants)
: LayerVerifierBase(layerName, inputInfos, outputInfos)
- , m_LayerInput(layerInput) {}
+ , m_Constants(constants) {}
- void VisitConstantLayer(const armnn::IConnectableLayer* layer,
- const armnn::ConstTensor& input,
- const char* name) override
+ void ExecuteStrategy(const armnn::IConnectableLayer* layer,
+ const armnn::BaseDescriptor& descriptor,
+ const std::vector<armnn::ConstTensor>& constants,
+ const char* name,
+ const armnn::LayerBindingId id = 0) override
{
- VerifyNameAndConnections(layer, name);
- CompareConstTensor(input, m_LayerInput);
- }
+ armnn::IgnoreUnused(descriptor, id);
- void VisitAdditionLayer(const armnn::IConnectableLayer*, const char*) override {}
+ switch (layer->GetType())
+ {
+ case armnn::LayerType::Input: break;
+ case armnn::LayerType::Output: break;
+ case armnn::LayerType::Addition: break;
+ default:
+ {
+ this->VerifyNameAndConnections(layer, name);
+
+ for (std::size_t i = 0; i < constants.size(); i++)
+ {
+ CompareConstTensor(constants[i], m_Constants[i]);
+ }
+ }
+ }
+ }
private:
- armnn::ConstTensor m_LayerInput;
+ const std::vector<armnn::ConstTensor> m_Constants;
};
const std::string layerName("constant");
@@ -532,53 +289,12 @@ BOOST_AUTO_TEST_CASE(SerializeConstant)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- ConstantLayerVerifier verifier(layerName, {}, {info}, constTensor);
- deserializedNetwork->Accept(verifier);
+ ConstantLayerVerifier verifier(layerName, {}, {info}, {constTensor});
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeConvolution2d)
{
- using Descriptor = armnn::Convolution2dDescriptor;
- class Convolution2dLayerVerifier : public LayerVerifierBaseWithDescriptor<Descriptor>
- {
- public:
- Convolution2dLayerVerifier(const std::string& layerName,
- const std::vector<armnn::TensorInfo>& inputInfos,
- const std::vector<armnn::TensorInfo>& outputInfos,
- const Descriptor& descriptor,
- const armnn::ConstTensor& weights,
- const armnn::Optional<armnn::ConstTensor>& biases)
- : LayerVerifierBaseWithDescriptor<Descriptor>(layerName, inputInfos, outputInfos, descriptor)
- , m_Weights(weights)
- , m_Biases(biases) {}
-
- void VisitConvolution2dLayer(const armnn::IConnectableLayer* layer,
- const Descriptor& descriptor,
- const armnn::ConstTensor& weights,
- const armnn::Optional<armnn::ConstTensor>& biases,
- const char* name) override
- {
- VerifyNameAndConnections(layer, name);
- VerifyDescriptor(descriptor);
-
- // check weights
- CompareConstTensor(weights, m_Weights);
-
- // check biases
- BOOST_CHECK(biases.has_value() == descriptor.m_BiasEnabled);
- BOOST_CHECK(biases.has_value() == m_Biases.has_value());
-
- if (biases.has_value() && m_Biases.has_value())
- {
- CompareConstTensor(biases.value(), m_Biases.value());
- }
- }
-
- private:
- armnn::ConstTensor m_Weights;
- armnn::Optional<armnn::ConstTensor> m_Biases;
- };
-
const std::string layerName("convolution2d");
const armnn::TensorInfo inputInfo ({ 1, 5, 5, 1 }, armnn::DataType::Float32);
const armnn::TensorInfo outputInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32);
@@ -622,53 +338,14 @@ BOOST_AUTO_TEST_CASE(SerializeConvolution2d)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- Convolution2dLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, descriptor, weights, biases);
- deserializedNetwork->Accept(verifier);
+ const std::vector<armnn::ConstTensor>& constants {weights, biases};
+ LayerVerifierBaseWithDescriptorAndConstants<armnn::Convolution2dDescriptor> verifier(
+ layerName, {inputInfo}, {outputInfo}, descriptor, constants);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeConvolution2dWithPerAxisParams)
{
- using Descriptor = armnn::Convolution2dDescriptor;
- class Convolution2dLayerVerifier : public LayerVerifierBaseWithDescriptor<Descriptor>
- {
- public:
- Convolution2dLayerVerifier(const std::string& layerName,
- const std::vector<armnn::TensorInfo>& inputInfos,
- const std::vector<armnn::TensorInfo>& outputInfos,
- const Descriptor& descriptor,
- const armnn::ConstTensor& weights,
- const armnn::Optional<armnn::ConstTensor>& biases)
- : LayerVerifierBaseWithDescriptor<Descriptor>(layerName, inputInfos, outputInfos, descriptor)
- , m_Weights(weights)
- , m_Biases(biases) {}
-
- void VisitConvolution2dLayer(const armnn::IConnectableLayer* layer,
- const Descriptor& descriptor,
- const armnn::ConstTensor& weights,
- const armnn::Optional<armnn::ConstTensor>& biases,
- const char* name) override
- {
- VerifyNameAndConnections(layer, name);
- VerifyDescriptor(descriptor);
-
- // check weights
- CompareConstTensor(weights, m_Weights);
-
- // check biases
- BOOST_CHECK(biases.has_value() == descriptor.m_BiasEnabled);
- BOOST_CHECK(biases.has_value() == m_Biases.has_value());
-
- if (biases.has_value() && m_Biases.has_value())
- {
- CompareConstTensor(biases.value(), m_Biases.value());
- }
- }
-
- private:
- armnn::ConstTensor m_Weights;
- armnn::Optional<armnn::ConstTensor> m_Biases;
- };
-
using namespace armnn;
const std::string layerName("convolution2dWithPerAxis");
@@ -716,14 +393,14 @@ BOOST_AUTO_TEST_CASE(SerializeConvolution2dWithPerAxisParams)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- Convolution2dLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, descriptor, weights, biases);
- deserializedNetwork->Accept(verifier);
+ const std::vector<armnn::ConstTensor>& constants {weights, biases};
+ LayerVerifierBaseWithDescriptorAndConstants<Convolution2dDescriptor> verifier(
+ layerName, {inputInfo}, {outputInfo}, descriptor, constants);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeDepthToSpace)
{
- DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(DepthToSpace)
-
const std::string layerName("depthToSpace");
const armnn::TensorInfo inputInfo ({ 1, 8, 4, 12 }, armnn::DataType::Float32);
@@ -747,53 +424,12 @@ BOOST_AUTO_TEST_CASE(SerializeDepthToSpace)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- DepthToSpaceLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, desc);
- deserializedNetwork->Accept(verifier);
+ LayerVerifierBaseWithDescriptor<armnn::DepthToSpaceDescriptor> verifier(layerName, {inputInfo}, {outputInfo}, desc);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeDepthwiseConvolution2d)
{
- using Descriptor = armnn::DepthwiseConvolution2dDescriptor;
- class DepthwiseConvolution2dLayerVerifier : public LayerVerifierBaseWithDescriptor<Descriptor>
- {
- public:
- DepthwiseConvolution2dLayerVerifier(const std::string& layerName,
- const std::vector<armnn::TensorInfo>& inputInfos,
- const std::vector<armnn::TensorInfo>& outputInfos,
- const Descriptor& descriptor,
- const armnn::ConstTensor& weights,
- const armnn::Optional<armnn::ConstTensor>& biases) :
- LayerVerifierBaseWithDescriptor<Descriptor>(layerName, inputInfos, outputInfos, descriptor),
- m_Weights(weights),
- m_Biases(biases) {}
-
- void VisitDepthwiseConvolution2dLayer(const armnn::IConnectableLayer* layer,
- const Descriptor& descriptor,
- const armnn::ConstTensor& weights,
- const armnn::Optional<armnn::ConstTensor>& biases,
- const char* name) override
- {
- VerifyNameAndConnections(layer, name);
- VerifyDescriptor(descriptor);
-
- // check weights
- CompareConstTensor(weights, m_Weights);
-
- // check biases
- BOOST_CHECK(biases.has_value() == descriptor.m_BiasEnabled);
- BOOST_CHECK(biases.has_value() == m_Biases.has_value());
-
- if (biases.has_value() && m_Biases.has_value())
- {
- CompareConstTensor(biases.value(), m_Biases.value());
- }
- }
-
- private:
- armnn::ConstTensor m_Weights;
- armnn::Optional<armnn::ConstTensor> m_Biases;
- };
-
const std::string layerName("depwiseConvolution2d");
const armnn::TensorInfo inputInfo ({ 1, 5, 5, 3 }, armnn::DataType::Float32);
const armnn::TensorInfo outputInfo({ 1, 3, 3, 3 }, armnn::DataType::Float32);
@@ -837,53 +473,14 @@ BOOST_AUTO_TEST_CASE(SerializeDepthwiseConvolution2d)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- DepthwiseConvolution2dLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, descriptor, weights, biases);
- deserializedNetwork->Accept(verifier);
+ const std::vector<armnn::ConstTensor>& constants {weights, biases};
+ LayerVerifierBaseWithDescriptorAndConstants<armnn::DepthwiseConvolution2dDescriptor> verifier(
+ layerName, {inputInfo}, {outputInfo}, descriptor, constants);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeDepthwiseConvolution2dWithPerAxisParams)
{
- using Descriptor = armnn::DepthwiseConvolution2dDescriptor;
- class DepthwiseConvolution2dLayerVerifier : public LayerVerifierBaseWithDescriptor<Descriptor>
- {
- public:
- DepthwiseConvolution2dLayerVerifier(const std::string& layerName,
- const std::vector<armnn::TensorInfo>& inputInfos,
- const std::vector<armnn::TensorInfo>& outputInfos,
- const Descriptor& descriptor,
- const armnn::ConstTensor& weights,
- const armnn::Optional<armnn::ConstTensor>& biases) :
- LayerVerifierBaseWithDescriptor<Descriptor>(layerName, inputInfos, outputInfos, descriptor),
- m_Weights(weights),
- m_Biases(biases) {}
-
- void VisitDepthwiseConvolution2dLayer(const armnn::IConnectableLayer* layer,
- const Descriptor& descriptor,
- const armnn::ConstTensor& weights,
- const armnn::Optional<armnn::ConstTensor>& biases,
- const char* name) override
- {
- VerifyNameAndConnections(layer, name);
- VerifyDescriptor(descriptor);
-
- // check weights
- CompareConstTensor(weights, m_Weights);
-
- // check biases
- BOOST_CHECK(biases.has_value() == descriptor.m_BiasEnabled);
- BOOST_CHECK(biases.has_value() == m_Biases.has_value());
-
- if (biases.has_value() && m_Biases.has_value())
- {
- CompareConstTensor(biases.value(), m_Biases.value());
- }
- }
-
- private:
- armnn::ConstTensor m_Weights;
- armnn::Optional<armnn::ConstTensor> m_Biases;
- };
-
using namespace armnn;
const std::string layerName("depwiseConvolution2dWithPerAxis");
@@ -933,14 +530,14 @@ BOOST_AUTO_TEST_CASE(SerializeDepthwiseConvolution2dWithPerAxisParams)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- DepthwiseConvolution2dLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, descriptor, weights, biases);
- deserializedNetwork->Accept(verifier);
+ const std::vector<armnn::ConstTensor>& constants {weights, biases};
+ LayerVerifierBaseWithDescriptorAndConstants<armnn::DepthwiseConvolution2dDescriptor> verifier(
+ layerName, {inputInfo}, {outputInfo}, descriptor, constants);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeDequantize)
{
- DECLARE_LAYER_VERIFIER_CLASS(Dequantize)
-
const std::string layerName("dequantize");
const armnn::TensorInfo inputInfo({ 1, 5, 2, 3 }, armnn::DataType::QAsymmU8, 0.5f, 1);
const armnn::TensorInfo outputInfo({ 1, 5, 2, 3 }, armnn::DataType::Float32);
@@ -959,39 +556,12 @@ BOOST_AUTO_TEST_CASE(SerializeDequantize)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- DequantizeLayerVerifier verifier(layerName, {inputInfo}, {outputInfo});
- deserializedNetwork->Accept(verifier);
+ LayerVerifierBase verifier(layerName, {inputInfo}, {outputInfo});
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeDeserializeDetectionPostProcess)
{
- using Descriptor = armnn::DetectionPostProcessDescriptor;
- class DetectionPostProcessLayerVerifier : public LayerVerifierBaseWithDescriptor<Descriptor>
- {
- public:
- DetectionPostProcessLayerVerifier(const std::string& layerName,
- const std::vector<armnn::TensorInfo>& inputInfos,
- const std::vector<armnn::TensorInfo>& outputInfos,
- const Descriptor& descriptor,
- const armnn::ConstTensor& anchors)
- : LayerVerifierBaseWithDescriptor<Descriptor>(layerName, inputInfos, outputInfos, descriptor)
- , m_Anchors(anchors) {}
-
- void VisitDetectionPostProcessLayer(const armnn::IConnectableLayer* layer,
- const Descriptor& descriptor,
- const armnn::ConstTensor& anchors,
- const char* name) override
- {
- VerifyNameAndConnections(layer, name);
- VerifyDescriptor(descriptor);
-
- CompareConstTensor(anchors, m_Anchors);
- }
-
- private:
- armnn::ConstTensor m_Anchors;
- };
-
const std::string layerName("detectionPostProcess");
const std::vector<armnn::TensorInfo> inputInfos({
@@ -1051,14 +621,14 @@ BOOST_AUTO_TEST_CASE(SerializeDeserializeDetectionPostProcess)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- DetectionPostProcessLayerVerifier verifier(layerName, inputInfos, outputInfos, descriptor, anchors);
- deserializedNetwork->Accept(verifier);
+ const std::vector<armnn::ConstTensor>& constants {anchors};
+ LayerVerifierBaseWithDescriptorAndConstants<armnn::DetectionPostProcessDescriptor> verifier(
+ layerName, inputInfos, outputInfos, descriptor, constants);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeDivision)
{
- DECLARE_LAYER_VERIFIER_CLASS(Division)
-
const std::string layerName("division");
const armnn::TensorInfo info({ 1, 5, 2, 3 }, armnn::DataType::Float32);
@@ -1079,131 +649,41 @@ BOOST_AUTO_TEST_CASE(SerializeDivision)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- DivisionLayerVerifier verifier(layerName, {info, info}, {info});
- deserializedNetwork->Accept(verifier);
+ LayerVerifierBase verifier(layerName, {info, info}, {info});
+ deserializedNetwork->ExecuteStrategy(verifier);
}
-class EqualLayerVerifier : public LayerVerifierBase
+BOOST_AUTO_TEST_CASE(SerializeDeserializeEqual)
{
-public:
- EqualLayerVerifier(const std::string& layerName,
- const std::vector<armnn::TensorInfo>& inputInfos,
- const std::vector<armnn::TensorInfo>& outputInfos)
- : LayerVerifierBase(layerName, inputInfos, outputInfos) {}
-
- void VisitComparisonLayer(const armnn::IConnectableLayer* layer,
- const armnn::ComparisonDescriptor& descriptor,
- const char* name) override
- {
- VerifyNameAndConnections(layer, name);
- BOOST_CHECK(descriptor.m_Operation == armnn::ComparisonOperation::Equal);
- }
-
- void VisitEqualLayer(const armnn::IConnectableLayer*, const char*) override
- {
- throw armnn::Exception("EqualLayer should have translated to ComparisonLayer");
- }
-};
-
-// NOTE: Until the deprecated AddEqualLayer disappears this test checks that calling
-// AddEqualLayer places a ComparisonLayer into the serialized format and that
-// when this deserialises we have a ComparisonLayer
-BOOST_AUTO_TEST_CASE(SerializeEqual)
-{
- const std::string layerName("equal");
-
- const armnn::TensorShape shape{2, 1, 2, 4};
-
- const armnn::TensorInfo inputInfo = armnn::TensorInfo(shape, armnn::DataType::Float32);
- const armnn::TensorInfo outputInfo = armnn::TensorInfo(shape, armnn::DataType::Boolean);
+ const std::string layerName("EqualLayer");
+ const armnn::TensorInfo inputTensorInfo1 = armnn::TensorInfo({2, 1, 2, 4}, armnn::DataType::Float32);
+ const armnn::TensorInfo inputTensorInfo2 = armnn::TensorInfo({2, 1, 2, 4}, armnn::DataType::Float32);
+ const armnn::TensorInfo outputTensorInfo = armnn::TensorInfo({2, 1, 2, 4}, armnn::DataType::Boolean);
armnn::INetworkPtr network = armnn::INetwork::Create();
- armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
- armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
+ armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(0);
+ armnn::IConnectableLayer* const inputLayer2 = network->AddInputLayer(1);
ARMNN_NO_DEPRECATE_WARN_BEGIN
armnn::IConnectableLayer* const equalLayer = network->AddEqualLayer(layerName.c_str());
ARMNN_NO_DEPRECATE_WARN_END
armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
- inputLayer0->GetOutputSlot(0).Connect(equalLayer->GetInputSlot(0));
- inputLayer1->GetOutputSlot(0).Connect(equalLayer->GetInputSlot(1));
+ inputLayer1->GetOutputSlot(0).Connect(equalLayer->GetInputSlot(0));
+ inputLayer1->GetOutputSlot(0).SetTensorInfo(inputTensorInfo1);
+ inputLayer2->GetOutputSlot(0).Connect(equalLayer->GetInputSlot(1));
+ inputLayer2->GetOutputSlot(0).SetTensorInfo(inputTensorInfo2);
equalLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
-
- inputLayer0->GetOutputSlot(0).SetTensorInfo(inputInfo);
- inputLayer1->GetOutputSlot(0).SetTensorInfo(inputInfo);
- equalLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
+ equalLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- EqualLayerVerifier verifier(layerName, { inputInfo, inputInfo }, { outputInfo });
- deserializedNetwork->Accept(verifier);
-}
-
-BOOST_AUTO_TEST_CASE(EnsureEqualBackwardCompatibility)
-{
- // The hex data below is a flat buffer containing a simple network with two inputs,
- // an EqualLayer (now deprecated) and an output
- //
- // This test verifies that we can still deserialize this old-style model by replacing
- // the EqualLayer with an equivalent ComparisonLayer
- const std::vector<uint8_t> equalModel =
- {
- 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00,
- 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
- 0xCC, 0x01, 0x00, 0x00, 0x20, 0x01, 0x00, 0x00, 0x70, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x02, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
- 0x60, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B, 0x04, 0x00, 0x00, 0x00, 0xFE, 0xFE, 0xFF, 0xFF, 0x04, 0x00,
- 0x00, 0x00, 0x06, 0xFF, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0xEA, 0xFE, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00,
- 0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x64, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xB4, 0xFE, 0xFF, 0xFF, 0x00, 0x00,
- 0x00, 0x13, 0x04, 0x00, 0x00, 0x00, 0x52, 0xFF, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x36, 0xFF, 0xFF, 0xFF,
- 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x11, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x1C, 0x00,
- 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x65, 0x71, 0x75, 0x61, 0x6C, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
- 0x5C, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x34, 0xFF,
- 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x92, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x04, 0x08, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00,
- 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x08, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00,
- 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x00, 0x00,
- 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0E, 0x00,
- 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x06, 0x00, 0x08, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0E, 0x00,
- 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x0E, 0x00, 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x14, 0x00, 0x0E, 0x00, 0x00, 0x00,
- 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
- 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00,
- 0x00, 0x00, 0x66, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x04, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x04, 0x00,
- 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09,
- 0x04, 0x00, 0x00, 0x00, 0xF6, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x0A, 0x00,
- 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x14, 0x00, 0x00, 0x00,
- 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00,
- 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0A, 0x00, 0x00, 0x00,
- 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x08, 0x00,
- 0x07, 0x00, 0x0C, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
- 0x04, 0x00, 0x00, 0x00
- };
-
- armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(std::string(equalModel.begin(), equalModel.end()));
- BOOST_CHECK(deserializedNetwork);
-
- const armnn::TensorShape shape{ 2, 1, 2, 4 };
-
- const armnn::TensorInfo inputInfo = armnn::TensorInfo(shape, armnn::DataType::Float32);
- const armnn::TensorInfo outputInfo = armnn::TensorInfo(shape, armnn::DataType::Boolean);
-
- EqualLayerVerifier verifier("equal", { inputInfo, inputInfo }, { outputInfo });
- deserializedNetwork->Accept(verifier);
+ LayerVerifierBase verifier(layerName, {inputTensorInfo1, inputTensorInfo2}, {outputTensorInfo});
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeFill)
{
- DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Fill)
-
const std::string layerName("fill");
const armnn::TensorInfo inputInfo({4}, armnn::DataType::Signed32);
const armnn::TensorInfo outputInfo({1, 3, 3, 1}, armnn::DataType::Float32);
@@ -1224,15 +704,13 @@ BOOST_AUTO_TEST_CASE(SerializeFill)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- FillLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, descriptor);
+ LayerVerifierBaseWithDescriptor<armnn::FillDescriptor> verifier(layerName, {inputInfo}, {outputInfo}, descriptor);
- deserializedNetwork->Accept(verifier);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeFloor)
{
- DECLARE_LAYER_VERIFIER_CLASS(Floor)
-
const std::string layerName("floor");
const armnn::TensorInfo info({4,4}, armnn::DataType::Float32);
@@ -1250,51 +728,12 @@ BOOST_AUTO_TEST_CASE(SerializeFloor)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- FloorLayerVerifier verifier(layerName, {info}, {info});
- deserializedNetwork->Accept(verifier);
+ LayerVerifierBase verifier(layerName, {info}, {info});
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeFullyConnected)
{
- using Descriptor = armnn::FullyConnectedDescriptor;
- class FullyConnectedLayerVerifier : public LayerVerifierBaseWithDescriptor<Descriptor>
- {
- public:
- FullyConnectedLayerVerifier(const std::string& layerName,
- const std::vector<armnn::TensorInfo>& inputInfos,
- const std::vector<armnn::TensorInfo>& outputInfos,
- const Descriptor& descriptor,
- const armnn::ConstTensor& weight,
- const armnn::Optional<armnn::ConstTensor>& bias)
- : LayerVerifierBaseWithDescriptor<Descriptor>(layerName, inputInfos, outputInfos, descriptor)
- , m_Weight(weight)
- , m_Bias(bias) {}
-
- void VisitFullyConnectedLayer(const armnn::IConnectableLayer* layer,
- const Descriptor& descriptor,
- const armnn::ConstTensor& weight,
- const armnn::Optional<armnn::ConstTensor>& bias,
- const char* name) override
- {
- VerifyNameAndConnections(layer, name);
- VerifyDescriptor(descriptor);
-
- CompareConstTensor(weight, m_Weight);
-
- BOOST_TEST(bias.has_value() == descriptor.m_BiasEnabled);
- BOOST_TEST(bias.has_value() == m_Bias.has_value());
-
- if (bias.has_value() && m_Bias.has_value())
- {
- CompareConstTensor(bias.value(), m_Bias.value());
- }
- }
-
- private:
- armnn::ConstTensor m_Weight;
- armnn::Optional<armnn::ConstTensor> m_Bias;
- };
-
const std::string layerName("fullyConnected");
const armnn::TensorInfo inputInfo ({ 2, 5, 1, 1 }, armnn::DataType::Float32);
const armnn::TensorInfo outputInfo({ 2, 3 }, armnn::DataType::Float32);
@@ -1328,8 +767,10 @@ BOOST_AUTO_TEST_CASE(SerializeFullyConnected)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- FullyConnectedLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, descriptor, weights, biases);
- deserializedNetwork->Accept(verifier);
+ const std::vector<armnn::ConstTensor> constants {weights, biases};
+ LayerVerifierBaseWithDescriptorAndConstants<armnn::FullyConnectedDescriptor> verifier(
+ layerName, {inputInfo}, {outputInfo}, descriptor, constants);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeGather)
@@ -1344,17 +785,26 @@ BOOST_AUTO_TEST_CASE(SerializeGather)
const GatherDescriptor& descriptor)
: LayerVerifierBaseWithDescriptor<GatherDescriptor>(layerName, inputInfos, outputInfos, descriptor) {}
- void VisitGatherLayer(const armnn::IConnectableLayer* layer,
- const GatherDescriptor& descriptor,
- const char *name) override
+ void ExecuteStrategy(const armnn::IConnectableLayer* layer,
+ const armnn::BaseDescriptor& descriptor,
+ const std::vector<armnn::ConstTensor>& constants,
+ const char* name,
+ const armnn::LayerBindingId id = 0) override
{
- VerifyNameAndConnections(layer, name);
- BOOST_CHECK(descriptor.m_Axis == m_Descriptor.m_Axis);
+ armnn::IgnoreUnused(constants, id);
+ switch (layer->GetType())
+ {
+ case armnn::LayerType::Input: break;
+ case armnn::LayerType::Output: break;
+ case armnn::LayerType::Constant: break;
+ default:
+ {
+ VerifyNameAndConnections(layer, name);
+ const GatherDescriptor& layerDescriptor = static_cast<const GatherDescriptor&>(descriptor);
+ BOOST_CHECK(layerDescriptor.m_Axis == m_Descriptor.m_Axis);
+ }
+ }
}
-
- void VisitConstantLayer(const armnn::IConnectableLayer*,
- const armnn::ConstTensor&,
- const char*) override {}
};
const std::string layerName("gather");
@@ -1390,35 +840,14 @@ BOOST_AUTO_TEST_CASE(SerializeGather)
BOOST_CHECK(deserializedNetwork);
GatherLayerVerifier verifier(layerName, {paramsInfo, indicesInfo}, {outputInfo}, descriptor);
- deserializedNetwork->Accept(verifier);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
-class GreaterLayerVerifier : public LayerVerifierBase
-{
-public:
- GreaterLayerVerifier(const std::string& layerName,
- const std::vector<armnn::TensorInfo>& inputInfos,
- const std::vector<armnn::TensorInfo>& outputInfos)
- : LayerVerifierBase(layerName, inputInfos, outputInfos) {}
-
- void VisitComparisonLayer(const armnn::IConnectableLayer* layer,
- const armnn::ComparisonDescriptor& descriptor,
- const char* name) override
- {
- VerifyNameAndConnections(layer, name);
- BOOST_CHECK(descriptor.m_Operation == armnn::ComparisonOperation::Greater);
- }
-
- void VisitGreaterLayer(const armnn::IConnectableLayer*, const char*) override
- {
- throw armnn::Exception("GreaterLayer should have translated to ComparisonLayer");
- }
-};
// NOTE: Until the deprecated AddGreaterLayer disappears this test checks that calling
// AddGreaterLayer places a ComparisonLayer into the serialized format and that
// when this deserialises we have a ComparisonLayer
-BOOST_AUTO_TEST_CASE(SerializeGreater)
+BOOST_AUTO_TEST_CASE(SerializeGreaterDeprecated)
{
const std::string layerName("greater");
@@ -1446,74 +875,13 @@ BOOST_AUTO_TEST_CASE(SerializeGreater)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- GreaterLayerVerifier verifier(layerName, { inputInfo, inputInfo }, { outputInfo });
- deserializedNetwork->Accept(verifier);
+ LayerVerifierBase verifier(layerName, { inputInfo, inputInfo }, { outputInfo });
+ deserializedNetwork->ExecuteStrategy(verifier);
}
-BOOST_AUTO_TEST_CASE(EnsureGreaterBackwardCompatibility)
-{
- // The hex data below is a flat buffer containing a simple network with two inputs,
- // an GreaterLayer (now deprecated) and an output
- //
- // This test verifies that we can still deserialize this old-style model by replacing
- // the GreaterLayer with an equivalent ComparisonLayer
- const std::vector<uint8_t> greaterModel =
- {
- 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00,
- 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
- 0xCC, 0x01, 0x00, 0x00, 0x20, 0x01, 0x00, 0x00, 0x70, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x02, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
- 0x60, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B, 0x04, 0x00, 0x00, 0x00, 0xFE, 0xFE, 0xFF, 0xFF, 0x04, 0x00,
- 0x00, 0x00, 0x06, 0xFF, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0xEA, 0xFE, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00,
- 0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x64, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xB4, 0xFE, 0xFF, 0xFF, 0x00, 0x00,
- 0x00, 0x19, 0x04, 0x00, 0x00, 0x00, 0x52, 0xFF, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x36, 0xFF, 0xFF, 0xFF,
- 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x1C, 0x00,
- 0x00, 0x00, 0x07, 0x00, 0x00, 0x00, 0x67, 0x72, 0x65, 0x61, 0x74, 0x65, 0x72, 0x00, 0x02, 0x00, 0x00, 0x00,
- 0x5C, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x34, 0xFF,
- 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x92, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x04, 0x08, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00,
- 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x08, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00,
- 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x00, 0x00,
- 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0E, 0x00,
- 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x06, 0x00, 0x08, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0E, 0x00,
- 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x0E, 0x00, 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x14, 0x00, 0x0E, 0x00, 0x00, 0x00,
- 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
- 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00,
- 0x00, 0x00, 0x66, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00,
- 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09,
- 0x04, 0x00, 0x00, 0x00, 0xF6, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x0A, 0x00,
- 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x14, 0x00, 0x00, 0x00,
- 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00,
- 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0A, 0x00, 0x00, 0x00,
- 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x08, 0x00,
- 0x07, 0x00, 0x0C, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
- 0x02, 0x00, 0x00, 0x00
- };
-
- armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(std::string(greaterModel.begin(), greaterModel.end()));
- BOOST_CHECK(deserializedNetwork);
-
- const armnn::TensorShape shape{ 1, 2, 2, 2 };
-
- const armnn::TensorInfo inputInfo = armnn::TensorInfo(shape, armnn::DataType::Float32);
- const armnn::TensorInfo outputInfo = armnn::TensorInfo(shape, armnn::DataType::Boolean);
-
- GreaterLayerVerifier verifier("greater", { inputInfo, inputInfo }, { outputInfo });
- deserializedNetwork->Accept(verifier);
-}
BOOST_AUTO_TEST_CASE(SerializeInstanceNormalization)
{
- DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(InstanceNormalization)
-
const std::string layerName("instanceNormalization");
const armnn::TensorInfo info({ 1, 2, 1, 5 }, armnn::DataType::Float32);
@@ -1538,12 +906,11 @@ BOOST_AUTO_TEST_CASE(SerializeInstanceNormalization)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- InstanceNormalizationLayerVerifier verifier(layerName, {info}, {info}, descriptor);
- deserializedNetwork->Accept(verifier);
+ LayerVerifierBaseWithDescriptor<armnn::InstanceNormalizationDescriptor> verifier(
+ layerName, {info}, {info}, descriptor);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
-DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(L2Normalization)
-
BOOST_AUTO_TEST_CASE(SerializeL2Normalization)
{
const std::string l2NormLayerName("l2Normalization");
@@ -1567,8 +934,9 @@ BOOST_AUTO_TEST_CASE(SerializeL2Normalization)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- L2NormalizationLayerVerifier verifier(l2NormLayerName, {info}, {info}, desc);
- deserializedNetwork->Accept(verifier);
+ LayerVerifierBaseWithDescriptor<armnn::L2NormalizationDescriptor> verifier(
+ l2NormLayerName, {info}, {info}, desc);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(EnsureL2NormalizationBackwardCompatibility)
@@ -1623,14 +991,13 @@ BOOST_AUTO_TEST_CASE(EnsureL2NormalizationBackwardCompatibility)
// Since this variable does not exist in the l2NormalizationModel dump, the default value will be loaded
desc.m_Eps = 1e-12f;
- L2NormalizationLayerVerifier verifier(layerName, {inputInfo}, {inputInfo}, desc);
- deserializedNetwork->Accept(verifier);
+ LayerVerifierBaseWithDescriptor<armnn::L2NormalizationDescriptor> verifier(
+ layerName, {inputInfo}, {inputInfo}, desc);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeLogicalBinary)
{
- DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(LogicalBinary)
-
const std::string layerName("logicalBinaryAnd");
const armnn::TensorShape shape{2, 1, 2, 2};
@@ -1657,14 +1024,13 @@ BOOST_AUTO_TEST_CASE(SerializeLogicalBinary)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- LogicalBinaryLayerVerifier verifier(layerName, { inputInfo, inputInfo }, { outputInfo }, descriptor);
- deserializedNetwork->Accept(verifier);
+ LayerVerifierBaseWithDescriptor<armnn::LogicalBinaryDescriptor> verifier(
+ layerName, { inputInfo, inputInfo }, { outputInfo }, descriptor);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeLogicalUnary)
{
- DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(ElementwiseUnary)
-
const std::string layerName("elementwiseUnaryLogicalNot");
const armnn::TensorShape shape{2, 1, 2, 2};
@@ -1690,15 +1056,14 @@ BOOST_AUTO_TEST_CASE(SerializeLogicalUnary)
BOOST_CHECK(deserializedNetwork);
- ElementwiseUnaryLayerVerifier verifier(layerName, { inputInfo }, { outputInfo }, descriptor);
+ LayerVerifierBaseWithDescriptor<armnn::ElementwiseUnaryDescriptor> verifier(
+ layerName, { inputInfo }, { outputInfo }, descriptor);
- deserializedNetwork->Accept(verifier);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeLogSoftmax)
{
- DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(LogSoftmax)
-
const std::string layerName("log_softmax");
const armnn::TensorInfo info({1, 10}, armnn::DataType::Float32);
@@ -1720,14 +1085,12 @@ BOOST_AUTO_TEST_CASE(SerializeLogSoftmax)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- LogSoftmaxLayerVerifier verifier(layerName, {info}, {info}, descriptor);
- deserializedNetwork->Accept(verifier);
+ LayerVerifierBaseWithDescriptor<armnn::LogSoftmaxDescriptor> verifier(layerName, {info}, {info}, descriptor);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeMaximum)
{
- DECLARE_LAYER_VERIFIER_CLASS(Maximum)
-
const std::string layerName("maximum");
const armnn::TensorInfo info({ 1, 2, 2, 3 }, armnn::DataType::Float32);
@@ -1748,14 +1111,12 @@ BOOST_AUTO_TEST_CASE(SerializeMaximum)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- MaximumLayerVerifier verifier(layerName, {info, info}, {info});
- deserializedNetwork->Accept(verifier);
+ LayerVerifierBase verifier(layerName, {info, info}, {info});
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeMean)
{
- DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Mean)
-
const std::string layerName("mean");
const armnn::TensorInfo inputInfo({1, 1, 3, 2}, armnn::DataType::Float32);
const armnn::TensorInfo outputInfo({1, 1, 1, 2}, armnn::DataType::Float32);
@@ -1778,14 +1139,12 @@ BOOST_AUTO_TEST_CASE(SerializeMean)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- MeanLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, descriptor);
- deserializedNetwork->Accept(verifier);
+ LayerVerifierBaseWithDescriptor<armnn::MeanDescriptor> verifier(layerName, {inputInfo}, {outputInfo}, descriptor);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeMerge)
{
- DECLARE_LAYER_VERIFIER_CLASS(Merge)
-
const std::string layerName("merge");
const armnn::TensorInfo info({ 1, 2, 2, 3 }, armnn::DataType::Float32);
@@ -1806,8 +1165,8 @@ BOOST_AUTO_TEST_CASE(SerializeMerge)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- MergeLayerVerifier verifier(layerName, {info, info}, {info});
- deserializedNetwork->Accept(verifier);
+ LayerVerifierBase verifier(layerName, {info, info}, {info});
+ deserializedNetwork->ExecuteStrategy(verifier);
}
class MergerLayerVerifier : public LayerVerifierBaseWithDescriptor<armnn::OriginsDescriptor>
@@ -1819,19 +1178,35 @@ public:
const armnn::OriginsDescriptor& descriptor)
: LayerVerifierBaseWithDescriptor<armnn::OriginsDescriptor>(layerName, inputInfos, outputInfos, descriptor) {}
- void VisitMergerLayer(const armnn::IConnectableLayer*,
- const armnn::OriginsDescriptor&,
- const char*) override
- {
- throw armnn::Exception("MergerLayer should have translated to ConcatLayer");
- }
-
- void VisitConcatLayer(const armnn::IConnectableLayer* layer,
- const armnn::OriginsDescriptor& descriptor,
- const char* name) override
+ void ExecuteStrategy(const armnn::IConnectableLayer* layer,
+ const armnn::BaseDescriptor& descriptor,
+ const std::vector<armnn::ConstTensor>& constants,
+ const char* name,
+ const armnn::LayerBindingId id = 0) override
{
- VerifyNameAndConnections(layer, name);
- VerifyDescriptor(descriptor);
+ armnn::IgnoreUnused(descriptor, constants, id);
+ switch (layer->GetType())
+ {
+ case armnn::LayerType::Input: break;
+ case armnn::LayerType::Output: break;
+ case armnn::LayerType::Merge:
+ {
+ throw armnn::Exception("MergerLayer should have translated to ConcatLayer");
+ break;
+ }
+ case armnn::LayerType::Concat:
+ {
+ VerifyNameAndConnections(layer, name);
+ const armnn::MergerDescriptor& layerDescriptor =
+ static_cast<const armnn::MergerDescriptor&>(descriptor);
+ VerifyDescriptor(layerDescriptor);
+ break;
+ }
+ default:
+ {
+ throw armnn::Exception("Unexpected layer type in Merge test model");
+ }
+ }
}
};
@@ -1870,7 +1245,7 @@ BOOST_AUTO_TEST_CASE(SerializeMerger)
BOOST_CHECK(deserializedNetwork);
MergerLayerVerifier verifier(layerName, {inputInfo, inputInfo}, {outputInfo}, descriptor);
- deserializedNetwork->Accept(verifier);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(EnsureMergerLayerBackwardCompatibility)
@@ -1939,7 +1314,7 @@ BOOST_AUTO_TEST_CASE(EnsureMergerLayerBackwardCompatibility)
armnn::CreateDescriptorForConcatenation(shapes.begin(), shapes.end(), 0);
MergerLayerVerifier verifier("merger", { inputInfo, inputInfo }, { outputInfo }, descriptor);
- deserializedNetwork->Accept(verifier);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeConcat)
@@ -1974,13 +1349,11 @@ BOOST_AUTO_TEST_CASE(SerializeConcat)
// NOTE: using the MergerLayerVerifier to ensure that it is a concat layer and not a
// merger layer that gets placed into the graph.
MergerLayerVerifier verifier(layerName, {inputInfo, inputInfo}, {outputInfo}, descriptor);
- deserializedNetwork->Accept(verifier);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeMinimum)
{
- DECLARE_LAYER_VERIFIER_CLASS(Minimum)
-
const std::string layerName("minimum");
const armnn::TensorInfo info({ 1, 2, 2, 3 }, armnn::DataType::Float32);
@@ -2001,14 +1374,12 @@ BOOST_AUTO_TEST_CASE(SerializeMinimum)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- MinimumLayerVerifier verifier(layerName, {info, info}, {info});
- deserializedNetwork->Accept(verifier);
+ LayerVerifierBase verifier(layerName, {info, info}, {info});
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeMultiplication)
{
- DECLARE_LAYER_VERIFIER_CLASS(Multiplication)
-
const std::string layerName("multiplication");
const armnn::TensorInfo info({ 1, 5, 2, 3 }, armnn::DataType::Float32);
@@ -2029,14 +1400,12 @@ BOOST_AUTO_TEST_CASE(SerializeMultiplication)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- MultiplicationLayerVerifier verifier(layerName, {info, info}, {info});
- deserializedNetwork->Accept(verifier);
+ LayerVerifierBase verifier(layerName, {info, info}, {info});
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializePrelu)
{
- DECLARE_LAYER_VERIFIER_CLASS(Prelu)
-
const std::string layerName("prelu");
armnn::TensorInfo inputTensorInfo ({ 4, 1, 2 }, armnn::DataType::Float32);
@@ -2060,14 +1429,12 @@ BOOST_AUTO_TEST_CASE(SerializePrelu)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- PreluLayerVerifier verifier(layerName, {inputTensorInfo, alphaTensorInfo}, {outputTensorInfo});
- deserializedNetwork->Accept(verifier);
+ LayerVerifierBase verifier(layerName, {inputTensorInfo, alphaTensorInfo}, {outputTensorInfo});
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeNormalization)
{
- DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Normalization)
-
const std::string layerName("normalization");
const armnn::TensorInfo info({2, 1, 2, 2}, armnn::DataType::Float32);
@@ -2092,12 +1459,10 @@ BOOST_AUTO_TEST_CASE(SerializeNormalization)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- NormalizationLayerVerifier verifier(layerName, {info}, {info}, desc);
- deserializedNetwork->Accept(verifier);
+ LayerVerifierBaseWithDescriptor<armnn::NormalizationDescriptor> verifier(layerName, {info}, {info}, desc);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
-DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Pad)
-
BOOST_AUTO_TEST_CASE(SerializePad)
{
const std::string layerName("pad");
@@ -2120,8 +1485,11 @@ BOOST_AUTO_TEST_CASE(SerializePad)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- PadLayerVerifier verifier(layerName, {inputTensorInfo}, {outputTensorInfo}, desc);
- deserializedNetwork->Accept(verifier);
+ LayerVerifierBaseWithDescriptor<armnn::PadDescriptor> verifier(layerName,
+ {inputTensorInfo},
+ {outputTensorInfo},
+ desc);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(EnsurePadBackwardCompatibility)
@@ -2174,14 +1542,12 @@ BOOST_AUTO_TEST_CASE(EnsurePadBackwardCompatibility)
armnn::PadDescriptor descriptor({{ 0, 0 }, { 1, 0 }, { 1, 1 }, { 1, 2 }});
- PadLayerVerifier verifier("pad", { inputInfo }, { outputInfo }, descriptor);
- deserializedNetwork->Accept(verifier);
+ LayerVerifierBaseWithDescriptor<armnn::PadDescriptor> verifier("pad", { inputInfo }, { outputInfo }, descriptor);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializePermute)
{
- DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Permute)
-
const std::string layerName("permute");
const armnn::TensorInfo inputTensorInfo({4, 3, 2, 1}, armnn::DataType::Float32);
const armnn::TensorInfo outputTensorInfo({1, 2, 3, 4}, armnn::DataType::Float32);
@@ -2202,14 +1568,13 @@ BOOST_AUTO_TEST_CASE(SerializePermute)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- PermuteLayerVerifier verifier(layerName, {inputTensorInfo}, {outputTensorInfo}, descriptor);
- deserializedNetwork->Accept(verifier);
+ LayerVerifierBaseWithDescriptor<armnn::PermuteDescriptor> verifier(
+ layerName, {inputTensorInfo}, {outputTensorInfo}, descriptor);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializePooling2d)
{
- DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Pooling2d)
-
const std::string layerName("pooling2d");
const armnn::TensorInfo inputInfo({1, 2, 2, 1}, armnn::DataType::Float32);
const armnn::TensorInfo outputInfo({1, 1, 1, 1}, armnn::DataType::Float32);
@@ -2242,14 +1607,13 @@ BOOST_AUTO_TEST_CASE(SerializePooling2d)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- Pooling2dLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, desc);
- deserializedNetwork->Accept(verifier);
+ LayerVerifierBaseWithDescriptor<armnn::Pooling2dDescriptor> verifier(
+ layerName, {inputInfo}, {outputInfo}, desc);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeQuantize)
{
- DECLARE_LAYER_VERIFIER_CLASS(Quantize)
-
const std::string layerName("quantize");
const armnn::TensorInfo info({ 1, 2, 2, 3 }, armnn::DataType::Float32);
@@ -2267,14 +1631,12 @@ BOOST_AUTO_TEST_CASE(SerializeQuantize)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- QuantizeLayerVerifier verifier(layerName, {info}, {info});
- deserializedNetwork->Accept(verifier);
+ LayerVerifierBase verifier(layerName, {info}, {info});
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeRank)
{
- DECLARE_LAYER_VERIFIER_CLASS(Rank)
-
const std::string layerName("rank");
const armnn::TensorInfo inputInfo({1, 9}, armnn::DataType::Float32);
const armnn::TensorInfo outputInfo({1}, armnn::DataType::Signed32);
@@ -2293,14 +1655,12 @@ BOOST_AUTO_TEST_CASE(SerializeRank)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- RankLayerVerifier verifier(layerName, {inputInfo}, {outputInfo});
- deserializedNetwork->Accept(verifier);
+ LayerVerifierBase verifier(layerName, {inputInfo}, {outputInfo});
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeReduceSum)
{
- DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Reduce)
-
const std::string layerName("Reduce_Sum");
const armnn::TensorInfo inputInfo({1, 1, 3, 2}, armnn::DataType::Float32);
const armnn::TensorInfo outputInfo({1, 1, 1, 2}, armnn::DataType::Float32);
@@ -2323,14 +1683,12 @@ BOOST_AUTO_TEST_CASE(SerializeReduceSum)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- ReduceLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, descriptor);
- deserializedNetwork->Accept(verifier);
+ LayerVerifierBaseWithDescriptor<armnn::ReduceDescriptor> verifier(layerName, {inputInfo}, {outputInfo}, descriptor);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeReshape)
{
- DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Reshape)
-
const std::string layerName("reshape");
const armnn::TensorInfo inputInfo({1, 9}, armnn::DataType::Float32);
const armnn::TensorInfo outputInfo({3, 3}, armnn::DataType::Float32);
@@ -2351,14 +1709,13 @@ BOOST_AUTO_TEST_CASE(SerializeReshape)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- ReshapeLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, descriptor);
- deserializedNetwork->Accept(verifier);
+ LayerVerifierBaseWithDescriptor<armnn::ReshapeDescriptor> verifier(
+ layerName, {inputInfo}, {outputInfo}, descriptor);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeResize)
{
- DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Resize)
-
const std::string layerName("resize");
const armnn::TensorInfo inputInfo = armnn::TensorInfo({1, 3, 5, 5}, armnn::DataType::Float32);
const armnn::TensorInfo outputInfo = armnn::TensorInfo({1, 3, 2, 4}, armnn::DataType::Float32);
@@ -2384,8 +1741,8 @@ BOOST_AUTO_TEST_CASE(SerializeResize)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- ResizeLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, desc);
- deserializedNetwork->Accept(verifier);
+ LayerVerifierBaseWithDescriptor<armnn::ResizeDescriptor> verifier(layerName, {inputInfo}, {outputInfo}, desc);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
class ResizeBilinearLayerVerifier : public LayerVerifierBaseWithDescriptor<armnn::ResizeBilinearDescriptor>
@@ -2398,25 +1755,36 @@ public:
: LayerVerifierBaseWithDescriptor<armnn::ResizeBilinearDescriptor>(
layerName, inputInfos, outputInfos, descriptor) {}
- void VisitResizeLayer(const armnn::IConnectableLayer* layer,
- const armnn::ResizeDescriptor& descriptor,
- const char* name) override
+ void ExecuteStrategy(const armnn::IConnectableLayer* layer,
+ const armnn::BaseDescriptor& descriptor,
+ const std::vector<armnn::ConstTensor>& constants,
+ const char* name,
+ const armnn::LayerBindingId id = 0) override
{
- VerifyNameAndConnections(layer, name);
-
- BOOST_CHECK(descriptor.m_Method == armnn::ResizeMethod::Bilinear);
- BOOST_CHECK(descriptor.m_TargetWidth == m_Descriptor.m_TargetWidth);
- BOOST_CHECK(descriptor.m_TargetHeight == m_Descriptor.m_TargetHeight);
- BOOST_CHECK(descriptor.m_DataLayout == m_Descriptor.m_DataLayout);
- BOOST_CHECK(descriptor.m_AlignCorners == m_Descriptor.m_AlignCorners);
- BOOST_CHECK(descriptor.m_HalfPixelCenters == m_Descriptor.m_HalfPixelCenters);
- }
-
- void VisitResizeBilinearLayer(const armnn::IConnectableLayer*,
- const armnn::ResizeBilinearDescriptor&,
- const char*) override
- {
- throw armnn::Exception("ResizeBilinearLayer should have translated to ResizeLayer");
+ armnn::IgnoreUnused(descriptor, constants, id);
+ switch (layer->GetType())
+ {
+ case armnn::LayerType::Input: break;
+ case armnn::LayerType::Output: break;
+ case armnn::LayerType::Resize:
+ {
+ VerifyNameAndConnections(layer, name);
+ const armnn::ResizeDescriptor& layerDescriptor =
+ static_cast<const armnn::ResizeDescriptor&>(descriptor);
+ BOOST_CHECK(layerDescriptor.m_Method == armnn::ResizeMethod::Bilinear);
+ BOOST_CHECK(layerDescriptor.m_TargetWidth == m_Descriptor.m_TargetWidth);
+ BOOST_CHECK(layerDescriptor.m_TargetHeight == m_Descriptor.m_TargetHeight);
+ BOOST_CHECK(layerDescriptor.m_DataLayout == m_Descriptor.m_DataLayout);
+ BOOST_CHECK(layerDescriptor.m_AlignCorners == m_Descriptor.m_AlignCorners);
+ BOOST_CHECK(layerDescriptor.m_HalfPixelCenters == m_Descriptor.m_HalfPixelCenters);
+ break;
+ }
+ default:
+ {
+ throw armnn::Exception("Unexpected layer type in test model. ResizeBiliniar "
+ "should have translated to Resize");
+ }
+ }
}
};
@@ -2452,7 +1820,7 @@ BOOST_AUTO_TEST_CASE(SerializeResizeBilinear)
BOOST_CHECK(deserializedNetwork);
ResizeBilinearLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, desc);
- deserializedNetwork->Accept(verifier);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(EnsureResizeBilinearBackwardCompatibility)
@@ -2508,13 +1876,11 @@ BOOST_AUTO_TEST_CASE(EnsureResizeBilinearBackwardCompatibility)
descriptor.m_TargetHeight = 2u;
ResizeBilinearLayerVerifier verifier("resizeBilinear", { inputInfo }, { outputInfo }, descriptor);
- deserializedNetwork->Accept(verifier);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeSlice)
{
- DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Slice)
-
const std::string layerName{"slice"};
const armnn::TensorInfo inputInfo = armnn::TensorInfo({3, 2, 3, 1}, armnn::DataType::Float32);
@@ -2537,14 +1903,12 @@ BOOST_AUTO_TEST_CASE(SerializeSlice)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- SliceLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, descriptor);
- deserializedNetwork->Accept(verifier);
+ LayerVerifierBaseWithDescriptor<armnn::SliceDescriptor> verifier(layerName, {inputInfo}, {outputInfo}, descriptor);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeSoftmax)
{
- DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Softmax)
-
const std::string layerName("softmax");
const armnn::TensorInfo info({1, 10}, armnn::DataType::Float32);
@@ -2565,14 +1929,12 @@ BOOST_AUTO_TEST_CASE(SerializeSoftmax)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- SoftmaxLayerVerifier verifier(layerName, {info}, {info}, descriptor);
- deserializedNetwork->Accept(verifier);
+ LayerVerifierBaseWithDescriptor<armnn::SoftmaxDescriptor> verifier(layerName, {info}, {info}, descriptor);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeSpaceToBatchNd)
{
- DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(SpaceToBatchNd)
-
const std::string layerName("spaceToBatchNd");
const armnn::TensorInfo inputInfo({2, 1, 2, 4}, armnn::DataType::Float32);
const armnn::TensorInfo outputInfo({8, 1, 1, 3}, armnn::DataType::Float32);
@@ -2596,14 +1958,13 @@ BOOST_AUTO_TEST_CASE(SerializeSpaceToBatchNd)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- SpaceToBatchNdLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, desc);
- deserializedNetwork->Accept(verifier);
+ LayerVerifierBaseWithDescriptor<armnn::SpaceToBatchNdDescriptor> verifier(
+ layerName, {inputInfo}, {outputInfo}, desc);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeSpaceToDepth)
{
- DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(SpaceToDepth)
-
const std::string layerName("spaceToDepth");
const armnn::TensorInfo inputInfo ({ 1, 16, 8, 3 }, armnn::DataType::Float32);
@@ -2627,14 +1988,13 @@ BOOST_AUTO_TEST_CASE(SerializeSpaceToDepth)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- SpaceToDepthLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, desc);
- deserializedNetwork->Accept(verifier);
+ LayerVerifierBaseWithDescriptor<armnn::SpaceToDepthDescriptor> verifier(
+ layerName, {inputInfo}, {outputInfo}, desc);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeSplitter)
{
- DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Splitter)
-
const unsigned int numViews = 3;
const unsigned int numDimensions = 4;
const unsigned int inputShape[] = {1, 18, 4, 4};
@@ -2682,14 +2042,13 @@ BOOST_AUTO_TEST_CASE(SerializeSplitter)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- SplitterLayerVerifier verifier(layerName, {inputInfo}, {outputInfo, outputInfo, outputInfo}, desc);
- deserializedNetwork->Accept(verifier);
+ LayerVerifierBaseWithDescriptor<armnn::ViewsDescriptor> verifier(
+ layerName, {inputInfo}, {outputInfo, outputInfo, outputInfo}, desc);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeStack)
{
- DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Stack)
-
const std::string layerName("stack");
armnn::TensorInfo inputTensorInfo ({4, 3, 5}, armnn::DataType::Float32);
@@ -2714,14 +2073,13 @@ BOOST_AUTO_TEST_CASE(SerializeStack)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- StackLayerVerifier verifier(layerName, {inputTensorInfo, inputTensorInfo}, {outputTensorInfo}, descriptor);
- deserializedNetwork->Accept(verifier);
+ LayerVerifierBaseWithDescriptor<armnn::StackDescriptor> verifier(
+ layerName, {inputTensorInfo, inputTensorInfo}, {outputTensorInfo}, descriptor);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeStandIn)
{
- DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(StandIn)
-
const std::string layerName("standIn");
armnn::TensorInfo tensorInfo({ 1u }, armnn::DataType::Float32);
@@ -2749,14 +2107,13 @@ BOOST_AUTO_TEST_CASE(SerializeStandIn)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- StandInLayerVerifier verifier(layerName, { tensorInfo, tensorInfo }, { tensorInfo, tensorInfo }, descriptor);
- deserializedNetwork->Accept(verifier);
+ LayerVerifierBaseWithDescriptor<armnn::StandInDescriptor> verifier(
+ layerName, { tensorInfo, tensorInfo }, { tensorInfo, tensorInfo }, descriptor);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeStridedSlice)
{
- DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(StridedSlice)
-
const std::string layerName("stridedSlice");
const armnn::TensorInfo inputInfo = armnn::TensorInfo({3, 2, 3, 1}, armnn::DataType::Float32);
const armnn::TensorInfo outputInfo = armnn::TensorInfo({3, 1}, armnn::DataType::Float32);
@@ -2780,14 +2137,13 @@ BOOST_AUTO_TEST_CASE(SerializeStridedSlice)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- StridedSliceLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, desc);
- deserializedNetwork->Accept(verifier);
+ LayerVerifierBaseWithDescriptor<armnn::StridedSliceDescriptor> verifier(
+ layerName, {inputInfo}, {outputInfo}, desc);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeSubtraction)
{
- DECLARE_LAYER_VERIFIER_CLASS(Subtraction)
-
const std::string layerName("subtraction");
const armnn::TensorInfo info({ 1, 4 }, armnn::DataType::Float32);
@@ -2808,8 +2164,8 @@ BOOST_AUTO_TEST_CASE(SerializeSubtraction)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- SubtractionLayerVerifier verifier(layerName, {info, info}, {info});
- deserializedNetwork->Accept(verifier);
+ LayerVerifierBase verifier(layerName, {info, info}, {info});
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeSwitch)
@@ -2820,16 +2176,31 @@ BOOST_AUTO_TEST_CASE(SerializeSwitch)
SwitchLayerVerifier(const std::string& layerName,
const std::vector<armnn::TensorInfo>& inputInfos,
const std::vector<armnn::TensorInfo>& outputInfos)
- : LayerVerifierBase(layerName, inputInfos, outputInfos) {}
+ : LayerVerifierBase(layerName, inputInfos, outputInfos) {}
- void VisitSwitchLayer(const armnn::IConnectableLayer* layer, const char* name) override
+ void ExecuteStrategy(const armnn::IConnectableLayer* layer,
+ const armnn::BaseDescriptor& descriptor,
+ const std::vector<armnn::ConstTensor>& constants,
+ const char* name,
+ const armnn::LayerBindingId id = 0) override
{
- VerifyNameAndConnections(layer, name);
+ armnn::IgnoreUnused(descriptor, constants, id);
+ switch (layer->GetType())
+ {
+ case armnn::LayerType::Input: break;
+ case armnn::LayerType::Output: break;
+ case armnn::LayerType::Constant: break;
+ case armnn::LayerType::Switch:
+ {
+ VerifyNameAndConnections(layer, name);
+ break;
+ }
+ default:
+ {
+ throw armnn::Exception("Unexpected layer type in Switch test model");
+ }
+ }
}
-
- void VisitConstantLayer(const armnn::IConnectableLayer*,
- const armnn::ConstTensor&,
- const char*) override {}
};
const std::string layerName("switch");
@@ -2859,13 +2230,11 @@ BOOST_AUTO_TEST_CASE(SerializeSwitch)
BOOST_CHECK(deserializedNetwork);
SwitchLayerVerifier verifier(layerName, {info, info}, {info, info});
- deserializedNetwork->Accept(verifier);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeTranspose)
{
- DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Transpose)
-
const std::string layerName("transpose");
const armnn::TensorInfo inputTensorInfo({4, 3, 2, 1}, armnn::DataType::Float32);
const armnn::TensorInfo outputTensorInfo({1, 2, 3, 4}, armnn::DataType::Float32);
@@ -2886,54 +2255,13 @@ BOOST_AUTO_TEST_CASE(SerializeTranspose)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- TransposeLayerVerifier verifier(layerName, {inputTensorInfo}, {outputTensorInfo}, descriptor);
- deserializedNetwork->Accept(verifier);
+ LayerVerifierBaseWithDescriptor<armnn::TransposeDescriptor> verifier(
+ layerName, {inputTensorInfo}, {outputTensorInfo}, descriptor);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeTransposeConvolution2d)
{
- using Descriptor = armnn::TransposeConvolution2dDescriptor;
- class TransposeConvolution2dLayerVerifier : public LayerVerifierBaseWithDescriptor<Descriptor>
- {
- public:
- TransposeConvolution2dLayerVerifier(const std::string& layerName,
- const std::vector<armnn::TensorInfo>& inputInfos,
- const std::vector<armnn::TensorInfo>& outputInfos,
- const Descriptor& descriptor,
- const armnn::ConstTensor& weights,
- const armnn::Optional<armnn::ConstTensor>& biases)
- : LayerVerifierBaseWithDescriptor<Descriptor>(layerName, inputInfos, outputInfos, descriptor)
- , m_Weights(weights)
- , m_Biases(biases)
- {}
-
- void VisitTransposeConvolution2dLayer(const armnn::IConnectableLayer* layer,
- const Descriptor& descriptor,
- const armnn::ConstTensor& weights,
- const armnn::Optional<armnn::ConstTensor>& biases,
- const char* name) override
- {
- VerifyNameAndConnections(layer, name);
- VerifyDescriptor(descriptor);
-
- // check weights
- CompareConstTensor(weights, m_Weights);
-
- // check biases
- BOOST_CHECK(biases.has_value() == descriptor.m_BiasEnabled);
- BOOST_CHECK(biases.has_value() == m_Biases.has_value());
-
- if (biases.has_value() && m_Biases.has_value())
- {
- CompareConstTensor(biases.value(), m_Biases.value());
- }
- }
-
- private:
- armnn::ConstTensor m_Weights;
- armnn::Optional<armnn::ConstTensor> m_Biases;
- };
-
const std::string layerName("transposeConvolution2d");
const armnn::TensorInfo inputInfo ({ 1, 7, 7, 1 }, armnn::DataType::Float32);
const armnn::TensorInfo outputInfo({ 1, 9, 9, 1 }, armnn::DataType::Float32);
@@ -2975,8 +2303,10 @@ BOOST_AUTO_TEST_CASE(SerializeTransposeConvolution2d)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
- TransposeConvolution2dLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, descriptor, weights, biases);
- deserializedNetwork->Accept(verifier);
+ const std::vector<armnn::ConstTensor> constants {weights, biases};
+ LayerVerifierBaseWithDescriptorAndConstants<armnn::TransposeConvolution2dDescriptor> verifier(
+ layerName, {inputInfo}, {outputInfo}, descriptor, constants);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_CASE(SerializeDeserializeNonLinearNetwork)
@@ -2991,16 +2321,31 @@ BOOST_AUTO_TEST_CASE(SerializeDeserializeNonLinearNetwork)
: LayerVerifierBase(layerName, inputInfos, outputInfos)
, m_LayerInput(layerInput) {}
- void VisitConstantLayer(const armnn::IConnectableLayer* layer,
- const armnn::ConstTensor& input,
- const char* name) override
+ void ExecuteStrategy(const armnn::IConnectableLayer* layer,
+ const armnn::BaseDescriptor& descriptor,
+ const std::vector<armnn::ConstTensor>& constants,
+ const char* name,
+ const armnn::LayerBindingId id = 0) override
{
- VerifyNameAndConnections(layer, name);
- CompareConstTensor(input, m_LayerInput);
+ armnn::IgnoreUnused(descriptor, constants, id);
+ switch (layer->GetType())
+ {
+ case armnn::LayerType::Input: break;
+ case armnn::LayerType::Output: break;
+ case armnn::LayerType::Addition: break;
+ case armnn::LayerType::Constant:
+ {
+ VerifyNameAndConnections(layer, name);
+ CompareConstTensor(constants.at(0), m_LayerInput);
+ break;
+ }
+ default:
+ {
+ throw armnn::Exception("Unexpected layer type in test model");
+ }
+ }
}
- void VisitAdditionLayer(const armnn::IConnectableLayer*, const char*) override {}
-
private:
armnn::ConstTensor m_LayerInput;
};
@@ -3029,2125 +2374,7 @@ BOOST_AUTO_TEST_CASE(SerializeDeserializeNonLinearNetwork)
BOOST_CHECK(deserializedNetwork);
ConstantLayerVerifier verifier(layerName, {}, {info}, constTensor);
- deserializedNetwork->Accept(verifier);
-}
-
-class VerifyLstmLayer : public LayerVerifierBaseWithDescriptor<armnn::LstmDescriptor>
-{
-public:
- VerifyLstmLayer(const std::string& layerName,
- const std::vector<armnn::TensorInfo>& inputInfos,
- const std::vector<armnn::TensorInfo>& outputInfos,
- const armnn::LstmDescriptor& descriptor,
- const armnn::LstmInputParams& inputParams)
- : LayerVerifierBaseWithDescriptor<armnn::LstmDescriptor>(layerName, inputInfos, outputInfos, descriptor)
- , m_InputParams(inputParams) {}
-
- void VisitLstmLayer(const armnn::IConnectableLayer* layer,
- const armnn::LstmDescriptor& descriptor,
- const armnn::LstmInputParams& params,
- const char* name)
- {
- VerifyNameAndConnections(layer, name);
- VerifyDescriptor(descriptor);
- VerifyInputParameters(params);
- }
-
-protected:
- void VerifyInputParameters(const armnn::LstmInputParams& params)
- {
- VerifyConstTensors(
- "m_InputToInputWeights", m_InputParams.m_InputToInputWeights, params.m_InputToInputWeights);
- VerifyConstTensors(
- "m_InputToForgetWeights", m_InputParams.m_InputToForgetWeights, params.m_InputToForgetWeights);
- VerifyConstTensors(
- "m_InputToCellWeights", m_InputParams.m_InputToCellWeights, params.m_InputToCellWeights);
- VerifyConstTensors(
- "m_InputToOutputWeights", m_InputParams.m_InputToOutputWeights, params.m_InputToOutputWeights);
- VerifyConstTensors(
- "m_RecurrentToInputWeights", m_InputParams.m_RecurrentToInputWeights, params.m_RecurrentToInputWeights);
- VerifyConstTensors(
- "m_RecurrentToForgetWeights", m_InputParams.m_RecurrentToForgetWeights, params.m_RecurrentToForgetWeights);
- VerifyConstTensors(
- "m_RecurrentToCellWeights", m_InputParams.m_RecurrentToCellWeights, params.m_RecurrentToCellWeights);
- VerifyConstTensors(
- "m_RecurrentToOutputWeights", m_InputParams.m_RecurrentToOutputWeights, params.m_RecurrentToOutputWeights);
- VerifyConstTensors(
- "m_CellToInputWeights", m_InputParams.m_CellToInputWeights, params.m_CellToInputWeights);
- VerifyConstTensors(
- "m_CellToForgetWeights", m_InputParams.m_CellToForgetWeights, params.m_CellToForgetWeights);
- VerifyConstTensors(
- "m_CellToOutputWeights", m_InputParams.m_CellToOutputWeights, params.m_CellToOutputWeights);
- VerifyConstTensors(
- "m_InputGateBias", m_InputParams.m_InputGateBias, params.m_InputGateBias);
- VerifyConstTensors(
- "m_ForgetGateBias", m_InputParams.m_ForgetGateBias, params.m_ForgetGateBias);
- VerifyConstTensors(
- "m_CellBias", m_InputParams.m_CellBias, params.m_CellBias);
- VerifyConstTensors(
- "m_OutputGateBias", m_InputParams.m_OutputGateBias, params.m_OutputGateBias);
- VerifyConstTensors(
- "m_ProjectionWeights", m_InputParams.m_ProjectionWeights, params.m_ProjectionWeights);
- VerifyConstTensors(
- "m_ProjectionBias", m_InputParams.m_ProjectionBias, params.m_ProjectionBias);
- VerifyConstTensors(
- "m_InputLayerNormWeights", m_InputParams.m_InputLayerNormWeights, params.m_InputLayerNormWeights);
- VerifyConstTensors(
- "m_ForgetLayerNormWeights", m_InputParams.m_ForgetLayerNormWeights, params.m_ForgetLayerNormWeights);
- VerifyConstTensors(
- "m_CellLayerNormWeights", m_InputParams.m_CellLayerNormWeights, params.m_CellLayerNormWeights);
- VerifyConstTensors(
- "m_OutputLayerNormWeights", m_InputParams.m_OutputLayerNormWeights, params.m_OutputLayerNormWeights);
- }
-
-private:
- armnn::LstmInputParams m_InputParams;
-};
-
-BOOST_AUTO_TEST_CASE(SerializeDeserializeLstmCifgPeepholeNoProjection)
-{
- armnn::LstmDescriptor descriptor;
- descriptor.m_ActivationFunc = 4;
- descriptor.m_ClippingThresProj = 0.0f;
- descriptor.m_ClippingThresCell = 0.0f;
- descriptor.m_CifgEnabled = true; // if this is true then we DON'T need to set the OptCifgParams
- descriptor.m_ProjectionEnabled = false;
- descriptor.m_PeepholeEnabled = true;
-
- const uint32_t batchSize = 1;
- const uint32_t inputSize = 2;
- const uint32_t numUnits = 4;
- const uint32_t outputSize = numUnits;
-
- armnn::TensorInfo inputWeightsInfo1({numUnits, inputSize}, armnn::DataType::Float32);
- std::vector<float> inputToForgetWeightsData = GenerateRandomData<float>(inputWeightsInfo1.GetNumElements());
- armnn::ConstTensor inputToForgetWeights(inputWeightsInfo1, inputToForgetWeightsData);
-
- std::vector<float> inputToCellWeightsData = GenerateRandomData<float>(inputWeightsInfo1.GetNumElements());
- armnn::ConstTensor inputToCellWeights(inputWeightsInfo1, inputToCellWeightsData);
-
- std::vector<float> inputToOutputWeightsData = GenerateRandomData<float>(inputWeightsInfo1.GetNumElements());
- armnn::ConstTensor inputToOutputWeights(inputWeightsInfo1, inputToOutputWeightsData);
-
- armnn::TensorInfo inputWeightsInfo2({numUnits, outputSize}, armnn::DataType::Float32);
- std::vector<float> recurrentToForgetWeightsData = GenerateRandomData<float>(inputWeightsInfo2.GetNumElements());
- armnn::ConstTensor recurrentToForgetWeights(inputWeightsInfo2, recurrentToForgetWeightsData);
-
- std::vector<float> recurrentToCellWeightsData = GenerateRandomData<float>(inputWeightsInfo2.GetNumElements());
- armnn::ConstTensor recurrentToCellWeights(inputWeightsInfo2, recurrentToCellWeightsData);
-
- std::vector<float> recurrentToOutputWeightsData = GenerateRandomData<float>(inputWeightsInfo2.GetNumElements());
- armnn::ConstTensor recurrentToOutputWeights(inputWeightsInfo2, recurrentToOutputWeightsData);
-
- armnn::TensorInfo inputWeightsInfo3({numUnits}, armnn::DataType::Float32);
- std::vector<float> cellToForgetWeightsData = GenerateRandomData<float>(inputWeightsInfo3.GetNumElements());
- armnn::ConstTensor cellToForgetWeights(inputWeightsInfo3, cellToForgetWeightsData);
-
- std::vector<float> cellToOutputWeightsData = GenerateRandomData<float>(inputWeightsInfo3.GetNumElements());
- armnn::ConstTensor cellToOutputWeights(inputWeightsInfo3, cellToOutputWeightsData);
-
- std::vector<float> forgetGateBiasData(numUnits, 1.0f);
- armnn::ConstTensor forgetGateBias(inputWeightsInfo3, forgetGateBiasData);
-
- std::vector<float> cellBiasData(numUnits, 0.0f);
- armnn::ConstTensor cellBias(inputWeightsInfo3, cellBiasData);
-
- std::vector<float> outputGateBiasData(numUnits, 0.0f);
- armnn::ConstTensor outputGateBias(inputWeightsInfo3, outputGateBiasData);
-
- armnn::LstmInputParams params;
- params.m_InputToForgetWeights = &inputToForgetWeights;
- params.m_InputToCellWeights = &inputToCellWeights;
- params.m_InputToOutputWeights = &inputToOutputWeights;
- params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
- params.m_RecurrentToCellWeights = &recurrentToCellWeights;
- params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
- params.m_ForgetGateBias = &forgetGateBias;
- params.m_CellBias = &cellBias;
- params.m_OutputGateBias = &outputGateBias;
- params.m_CellToForgetWeights = &cellToForgetWeights;
- params.m_CellToOutputWeights = &cellToOutputWeights;
-
- armnn::INetworkPtr network = armnn::INetwork::Create();
- armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
- armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(1);
- armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(2);
- const std::string layerName("lstm");
- armnn::IConnectableLayer* const lstmLayer = network->AddLstmLayer(descriptor, params, layerName.c_str());
- armnn::IConnectableLayer* const scratchBuffer = network->AddOutputLayer(0);
- armnn::IConnectableLayer* const outputStateOut = network->AddOutputLayer(1);
- armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(2);
- armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(3);
-
- // connect up
- armnn::TensorInfo inputTensorInfo({ batchSize, inputSize }, armnn::DataType::Float32);
- armnn::TensorInfo cellStateTensorInfo({ batchSize, numUnits}, armnn::DataType::Float32);
- armnn::TensorInfo outputStateTensorInfo({ batchSize, outputSize }, armnn::DataType::Float32);
- armnn::TensorInfo lstmTensorInfoScratchBuff({ batchSize, numUnits * 3 }, armnn::DataType::Float32);
-
- inputLayer->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(0));
- inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
-
- outputStateIn->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(1));
- outputStateIn->GetOutputSlot(0).SetTensorInfo(outputStateTensorInfo);
-
- cellStateIn->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(2));
- cellStateIn->GetOutputSlot(0).SetTensorInfo(cellStateTensorInfo);
-
- lstmLayer->GetOutputSlot(0).Connect(scratchBuffer->GetInputSlot(0));
- lstmLayer->GetOutputSlot(0).SetTensorInfo(lstmTensorInfoScratchBuff);
-
- lstmLayer->GetOutputSlot(1).Connect(outputStateOut->GetInputSlot(0));
- lstmLayer->GetOutputSlot(1).SetTensorInfo(outputStateTensorInfo);
-
- lstmLayer->GetOutputSlot(2).Connect(cellStateOut->GetInputSlot(0));
- lstmLayer->GetOutputSlot(2).SetTensorInfo(cellStateTensorInfo);
-
- lstmLayer->GetOutputSlot(3).Connect(outputLayer->GetInputSlot(0));
- lstmLayer->GetOutputSlot(3).SetTensorInfo(outputStateTensorInfo);
-
- armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
- BOOST_CHECK(deserializedNetwork);
-
- VerifyLstmLayer checker(
- layerName,
- {inputTensorInfo, outputStateTensorInfo, cellStateTensorInfo},
- {lstmTensorInfoScratchBuff, outputStateTensorInfo, cellStateTensorInfo, outputStateTensorInfo},
- descriptor,
- params);
- deserializedNetwork->Accept(checker);
-}
-
-BOOST_AUTO_TEST_CASE(SerializeDeserializeLstmNoCifgWithPeepholeAndProjection)
-{
- armnn::LstmDescriptor descriptor;
- descriptor.m_ActivationFunc = 4;
- descriptor.m_ClippingThresProj = 0.0f;
- descriptor.m_ClippingThresCell = 0.0f;
- descriptor.m_CifgEnabled = false; // if this is true then we DON'T need to set the OptCifgParams
- descriptor.m_ProjectionEnabled = true;
- descriptor.m_PeepholeEnabled = true;
-
- const uint32_t batchSize = 2;
- const uint32_t inputSize = 5;
- const uint32_t numUnits = 20;
- const uint32_t outputSize = 16;
-
- armnn::TensorInfo tensorInfo20x5({numUnits, inputSize}, armnn::DataType::Float32);
- std::vector<float> inputToInputWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
- armnn::ConstTensor inputToInputWeights(tensorInfo20x5, inputToInputWeightsData);
-
- std::vector<float> inputToForgetWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
- armnn::ConstTensor inputToForgetWeights(tensorInfo20x5, inputToForgetWeightsData);
-
- std::vector<float> inputToCellWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
- armnn::ConstTensor inputToCellWeights(tensorInfo20x5, inputToCellWeightsData);
-
- std::vector<float> inputToOutputWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
- armnn::ConstTensor inputToOutputWeights(tensorInfo20x5, inputToOutputWeightsData);
-
- armnn::TensorInfo tensorInfo20({numUnits}, armnn::DataType::Float32);
- std::vector<float> inputGateBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
- armnn::ConstTensor inputGateBias(tensorInfo20, inputGateBiasData);
-
- std::vector<float> forgetGateBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
- armnn::ConstTensor forgetGateBias(tensorInfo20, forgetGateBiasData);
-
- std::vector<float> cellBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
- armnn::ConstTensor cellBias(tensorInfo20, cellBiasData);
-
- std::vector<float> outputGateBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
- armnn::ConstTensor outputGateBias(tensorInfo20, outputGateBiasData);
-
- armnn::TensorInfo tensorInfo20x16({numUnits, outputSize}, armnn::DataType::Float32);
- std::vector<float> recurrentToInputWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
- armnn::ConstTensor recurrentToInputWeights(tensorInfo20x16, recurrentToInputWeightsData);
-
- std::vector<float> recurrentToForgetWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
- armnn::ConstTensor recurrentToForgetWeights(tensorInfo20x16, recurrentToForgetWeightsData);
-
- std::vector<float> recurrentToCellWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
- armnn::ConstTensor recurrentToCellWeights(tensorInfo20x16, recurrentToCellWeightsData);
-
- std::vector<float> recurrentToOutputWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
- armnn::ConstTensor recurrentToOutputWeights(tensorInfo20x16, recurrentToOutputWeightsData);
-
- std::vector<float> cellToInputWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
- armnn::ConstTensor cellToInputWeights(tensorInfo20, cellToInputWeightsData);
-
- std::vector<float> cellToForgetWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
- armnn::ConstTensor cellToForgetWeights(tensorInfo20, cellToForgetWeightsData);
-
- std::vector<float> cellToOutputWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
- armnn::ConstTensor cellToOutputWeights(tensorInfo20, cellToOutputWeightsData);
-
- armnn::TensorInfo tensorInfo16x20({outputSize, numUnits}, armnn::DataType::Float32);
- std::vector<float> projectionWeightsData = GenerateRandomData<float>(tensorInfo16x20.GetNumElements());
- armnn::ConstTensor projectionWeights(tensorInfo16x20, projectionWeightsData);
-
- armnn::TensorInfo tensorInfo16({outputSize}, armnn::DataType::Float32);
- std::vector<float> projectionBiasData(outputSize, 0.f);
- armnn::ConstTensor projectionBias(tensorInfo16, projectionBiasData);
-
- armnn::LstmInputParams params;
- params.m_InputToForgetWeights = &inputToForgetWeights;
- params.m_InputToCellWeights = &inputToCellWeights;
- params.m_InputToOutputWeights = &inputToOutputWeights;
- params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
- params.m_RecurrentToCellWeights = &recurrentToCellWeights;
- params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
- params.m_ForgetGateBias = &forgetGateBias;
- params.m_CellBias = &cellBias;
- params.m_OutputGateBias = &outputGateBias;
-
- // additional params because: descriptor.m_CifgEnabled = false
- params.m_InputToInputWeights = &inputToInputWeights;
- params.m_RecurrentToInputWeights = &recurrentToInputWeights;
- params.m_CellToInputWeights = &cellToInputWeights;
- params.m_InputGateBias = &inputGateBias;
-
- // additional params because: descriptor.m_ProjectionEnabled = true
- params.m_ProjectionWeights = &projectionWeights;
- params.m_ProjectionBias = &projectionBias;
-
- // additional params because: descriptor.m_PeepholeEnabled = true
- params.m_CellToForgetWeights = &cellToForgetWeights;
- params.m_CellToOutputWeights = &cellToOutputWeights;
-
- armnn::INetworkPtr network = armnn::INetwork::Create();
- armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
- armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(1);
- armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(2);
- const std::string layerName("lstm");
- armnn::IConnectableLayer* const lstmLayer = network->AddLstmLayer(descriptor, params, layerName.c_str());
- armnn::IConnectableLayer* const scratchBuffer = network->AddOutputLayer(0);
- armnn::IConnectableLayer* const outputStateOut = network->AddOutputLayer(1);
- armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(2);
- armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(3);
-
- // connect up
- armnn::TensorInfo inputTensorInfo({ batchSize, inputSize }, armnn::DataType::Float32);
- armnn::TensorInfo cellStateTensorInfo({ batchSize, numUnits}, armnn::DataType::Float32);
- armnn::TensorInfo outputStateTensorInfo({ batchSize, outputSize }, armnn::DataType::Float32);
- armnn::TensorInfo lstmTensorInfoScratchBuff({ batchSize, numUnits * 4 }, armnn::DataType::Float32);
-
- inputLayer->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(0));
- inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
-
- outputStateIn->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(1));
- outputStateIn->GetOutputSlot(0).SetTensorInfo(outputStateTensorInfo);
-
- cellStateIn->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(2));
- cellStateIn->GetOutputSlot(0).SetTensorInfo(cellStateTensorInfo);
-
- lstmLayer->GetOutputSlot(0).Connect(scratchBuffer->GetInputSlot(0));
- lstmLayer->GetOutputSlot(0).SetTensorInfo(lstmTensorInfoScratchBuff);
-
- lstmLayer->GetOutputSlot(1).Connect(outputStateOut->GetInputSlot(0));
- lstmLayer->GetOutputSlot(1).SetTensorInfo(outputStateTensorInfo);
-
- lstmLayer->GetOutputSlot(2).Connect(cellStateOut->GetInputSlot(0));
- lstmLayer->GetOutputSlot(2).SetTensorInfo(cellStateTensorInfo);
-
- lstmLayer->GetOutputSlot(3).Connect(outputLayer->GetInputSlot(0));
- lstmLayer->GetOutputSlot(3).SetTensorInfo(outputStateTensorInfo);
-
- armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
- BOOST_CHECK(deserializedNetwork);
-
- VerifyLstmLayer checker(
- layerName,
- {inputTensorInfo, outputStateTensorInfo, cellStateTensorInfo},
- {lstmTensorInfoScratchBuff, outputStateTensorInfo, cellStateTensorInfo, outputStateTensorInfo},
- descriptor,
- params);
- deserializedNetwork->Accept(checker);
-}
-
-BOOST_AUTO_TEST_CASE(SerializeDeserializeLstmNoCifgWithPeepholeWithProjectionWithLayerNorm)
-{
- armnn::LstmDescriptor descriptor;
- descriptor.m_ActivationFunc = 4;
- descriptor.m_ClippingThresProj = 0.0f;
- descriptor.m_ClippingThresCell = 0.0f;
- descriptor.m_CifgEnabled = false; // if this is true then we DON'T need to set the OptCifgParams
- descriptor.m_ProjectionEnabled = true;
- descriptor.m_PeepholeEnabled = true;
- descriptor.m_LayerNormEnabled = true;
-
- const uint32_t batchSize = 2;
- const uint32_t inputSize = 5;
- const uint32_t numUnits = 20;
- const uint32_t outputSize = 16;
-
- armnn::TensorInfo tensorInfo20x5({numUnits, inputSize}, armnn::DataType::Float32);
- std::vector<float> inputToInputWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
- armnn::ConstTensor inputToInputWeights(tensorInfo20x5, inputToInputWeightsData);
-
- std::vector<float> inputToForgetWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
- armnn::ConstTensor inputToForgetWeights(tensorInfo20x5, inputToForgetWeightsData);
-
- std::vector<float> inputToCellWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
- armnn::ConstTensor inputToCellWeights(tensorInfo20x5, inputToCellWeightsData);
-
- std::vector<float> inputToOutputWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
- armnn::ConstTensor inputToOutputWeights(tensorInfo20x5, inputToOutputWeightsData);
-
- armnn::TensorInfo tensorInfo20({numUnits}, armnn::DataType::Float32);
- std::vector<float> inputGateBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
- armnn::ConstTensor inputGateBias(tensorInfo20, inputGateBiasData);
-
- std::vector<float> forgetGateBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
- armnn::ConstTensor forgetGateBias(tensorInfo20, forgetGateBiasData);
-
- std::vector<float> cellBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
- armnn::ConstTensor cellBias(tensorInfo20, cellBiasData);
-
- std::vector<float> outputGateBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
- armnn::ConstTensor outputGateBias(tensorInfo20, outputGateBiasData);
-
- armnn::TensorInfo tensorInfo20x16({numUnits, outputSize}, armnn::DataType::Float32);
- std::vector<float> recurrentToInputWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
- armnn::ConstTensor recurrentToInputWeights(tensorInfo20x16, recurrentToInputWeightsData);
-
- std::vector<float> recurrentToForgetWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
- armnn::ConstTensor recurrentToForgetWeights(tensorInfo20x16, recurrentToForgetWeightsData);
-
- std::vector<float> recurrentToCellWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
- armnn::ConstTensor recurrentToCellWeights(tensorInfo20x16, recurrentToCellWeightsData);
-
- std::vector<float> recurrentToOutputWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
- armnn::ConstTensor recurrentToOutputWeights(tensorInfo20x16, recurrentToOutputWeightsData);
-
- std::vector<float> cellToInputWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
- armnn::ConstTensor cellToInputWeights(tensorInfo20, cellToInputWeightsData);
-
- std::vector<float> cellToForgetWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
- armnn::ConstTensor cellToForgetWeights(tensorInfo20, cellToForgetWeightsData);
-
- std::vector<float> cellToOutputWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
- armnn::ConstTensor cellToOutputWeights(tensorInfo20, cellToOutputWeightsData);
-
- armnn::TensorInfo tensorInfo16x20({outputSize, numUnits}, armnn::DataType::Float32);
- std::vector<float> projectionWeightsData = GenerateRandomData<float>(tensorInfo16x20.GetNumElements());
- armnn::ConstTensor projectionWeights(tensorInfo16x20, projectionWeightsData);
-
- armnn::TensorInfo tensorInfo16({outputSize}, armnn::DataType::Float32);
- std::vector<float> projectionBiasData(outputSize, 0.f);
- armnn::ConstTensor projectionBias(tensorInfo16, projectionBiasData);
-
- std::vector<float> inputLayerNormWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
- armnn::ConstTensor inputLayerNormWeights(tensorInfo20, forgetGateBiasData);
-
- std::vector<float> forgetLayerNormWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
- armnn::ConstTensor forgetLayerNormWeights(tensorInfo20, forgetGateBiasData);
-
- std::vector<float> cellLayerNormWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
- armnn::ConstTensor cellLayerNormWeights(tensorInfo20, forgetGateBiasData);
-
- std::vector<float> outLayerNormWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
- armnn::ConstTensor outLayerNormWeights(tensorInfo20, forgetGateBiasData);
-
- armnn::LstmInputParams params;
- params.m_InputToForgetWeights = &inputToForgetWeights;
- params.m_InputToCellWeights = &inputToCellWeights;
- params.m_InputToOutputWeights = &inputToOutputWeights;
- params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
- params.m_RecurrentToCellWeights = &recurrentToCellWeights;
- params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
- params.m_ForgetGateBias = &forgetGateBias;
- params.m_CellBias = &cellBias;
- params.m_OutputGateBias = &outputGateBias;
-
- // additional params because: descriptor.m_CifgEnabled = false
- params.m_InputToInputWeights = &inputToInputWeights;
- params.m_RecurrentToInputWeights = &recurrentToInputWeights;
- params.m_CellToInputWeights = &cellToInputWeights;
- params.m_InputGateBias = &inputGateBias;
-
- // additional params because: descriptor.m_ProjectionEnabled = true
- params.m_ProjectionWeights = &projectionWeights;
- params.m_ProjectionBias = &projectionBias;
-
- // additional params because: descriptor.m_PeepholeEnabled = true
- params.m_CellToForgetWeights = &cellToForgetWeights;
- params.m_CellToOutputWeights = &cellToOutputWeights;
-
- // additional params because: despriptor.m_LayerNormEnabled = true
- params.m_InputLayerNormWeights = &inputLayerNormWeights;
- params.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
- params.m_CellLayerNormWeights = &cellLayerNormWeights;
- params.m_OutputLayerNormWeights = &outLayerNormWeights;
-
- armnn::INetworkPtr network = armnn::INetwork::Create();
- armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
- armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(1);
- armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(2);
- const std::string layerName("lstm");
- armnn::IConnectableLayer* const lstmLayer = network->AddLstmLayer(descriptor, params, layerName.c_str());
- armnn::IConnectableLayer* const scratchBuffer = network->AddOutputLayer(0);
- armnn::IConnectableLayer* const outputStateOut = network->AddOutputLayer(1);
- armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(2);
- armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(3);
-
- // connect up
- armnn::TensorInfo inputTensorInfo({ batchSize, inputSize }, armnn::DataType::Float32);
- armnn::TensorInfo cellStateTensorInfo({ batchSize, numUnits}, armnn::DataType::Float32);
- armnn::TensorInfo outputStateTensorInfo({ batchSize, outputSize }, armnn::DataType::Float32);
- armnn::TensorInfo lstmTensorInfoScratchBuff({ batchSize, numUnits * 4 }, armnn::DataType::Float32);
-
- inputLayer->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(0));
- inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
-
- outputStateIn->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(1));
- outputStateIn->GetOutputSlot(0).SetTensorInfo(outputStateTensorInfo);
-
- cellStateIn->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(2));
- cellStateIn->GetOutputSlot(0).SetTensorInfo(cellStateTensorInfo);
-
- lstmLayer->GetOutputSlot(0).Connect(scratchBuffer->GetInputSlot(0));
- lstmLayer->GetOutputSlot(0).SetTensorInfo(lstmTensorInfoScratchBuff);
-
- lstmLayer->GetOutputSlot(1).Connect(outputStateOut->GetInputSlot(0));
- lstmLayer->GetOutputSlot(1).SetTensorInfo(outputStateTensorInfo);
-
- lstmLayer->GetOutputSlot(2).Connect(cellStateOut->GetInputSlot(0));
- lstmLayer->GetOutputSlot(2).SetTensorInfo(cellStateTensorInfo);
-
- lstmLayer->GetOutputSlot(3).Connect(outputLayer->GetInputSlot(0));
- lstmLayer->GetOutputSlot(3).SetTensorInfo(outputStateTensorInfo);
-
- armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
- BOOST_CHECK(deserializedNetwork);
-
- VerifyLstmLayer checker(
- layerName,
- {inputTensorInfo, outputStateTensorInfo, cellStateTensorInfo},
- {lstmTensorInfoScratchBuff, outputStateTensorInfo, cellStateTensorInfo, outputStateTensorInfo},
- descriptor,
- params);
- deserializedNetwork->Accept(checker);
-}
-
-BOOST_AUTO_TEST_CASE(EnsureLstmLayersBackwardCompatibility)
-{
- // The hex data below is a flat buffer containing a lstm layer with no Cifg, with peephole and projection
- // enabled. That data was obtained before additional layer normalization parameters where added to the
- // lstm serializer. That way it can be tested if a lstm model with the old parameter configuration can
- // still be loaded
- const std::vector<uint8_t> lstmNoCifgWithPeepholeAndProjectionModel =
- {
- 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00,
- 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x2C, 0x00, 0x00, 0x00, 0x38, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00,
- 0xDC, 0x29, 0x00, 0x00, 0x38, 0x29, 0x00, 0x00, 0xB4, 0x28, 0x00, 0x00, 0x94, 0x01, 0x00, 0x00, 0x3C, 0x01,
- 0x00, 0x00, 0xE0, 0x00, 0x00, 0x00, 0x84, 0x00, 0x00, 0x00, 0x28, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x04, 0x00,
- 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00, 0x70, 0xD6, 0xFF, 0xFF,
- 0x00, 0x00, 0x00, 0x0B, 0x04, 0x00, 0x00, 0x00, 0x06, 0xD7, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x88, 0xD7,
- 0xFF, 0xFF, 0x08, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0xF6, 0xD6, 0xFF, 0xFF, 0x07, 0x00, 0x00, 0x00,
- 0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0xE8, 0xD7, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0xC8, 0xD6, 0xFF, 0xFF, 0x00, 0x00,
- 0x00, 0x0B, 0x04, 0x00, 0x00, 0x00, 0x5E, 0xD7, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0xE0, 0xD7, 0xFF, 0xFF,
- 0x08, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x4E, 0xD7, 0xFF, 0xFF, 0x06, 0x00, 0x00, 0x00, 0x10, 0x00,
- 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0xD8,
- 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x20, 0xD7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B,
- 0x04, 0x00, 0x00, 0x00, 0xB6, 0xD7, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x38, 0xD8, 0xFF, 0xFF, 0x08, 0x00,
- 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0xA6, 0xD7, 0xFF, 0xFF, 0x05, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,
- 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x98, 0xD8, 0xFF, 0xFF,
- 0x03, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x78, 0xD7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B, 0x04, 0x00,
- 0x00, 0x00, 0x0E, 0xD8, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x16, 0xD8, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00,
- 0xFA, 0xD7, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00,
- 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
- 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xEC, 0xD8, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x6C, 0xD8, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x23, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00,
- 0x12, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00, 0x00, 0x00, 0xE0, 0x25, 0x00, 0x00, 0xD0, 0x25,
- 0x00, 0x00, 0x2C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x26, 0x00, 0x48, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00,
- 0x10, 0x00, 0x14, 0x00, 0x18, 0x00, 0x1C, 0x00, 0x20, 0x00, 0x24, 0x00, 0x28, 0x00, 0x2C, 0x00, 0x30, 0x00,
- 0x34, 0x00, 0x38, 0x00, 0x3C, 0x00, 0x40, 0x00, 0x44, 0x00, 0x26, 0x00, 0x00, 0x00, 0xC4, 0x23, 0x00, 0x00,
- 0xF8, 0x21, 0x00, 0x00, 0x2C, 0x20, 0x00, 0x00, 0xF0, 0x1A, 0x00, 0x00, 0xB4, 0x15, 0x00, 0x00, 0x78, 0x10,
- 0x00, 0x00, 0xF0, 0x0F, 0x00, 0x00, 0x68, 0x0F, 0x00, 0x00, 0xE0, 0x0E, 0x00, 0x00, 0x14, 0x0D, 0x00, 0x00,
- 0xD8, 0x07, 0x00, 0x00, 0x50, 0x07, 0x00, 0x00, 0xC8, 0x06, 0x00, 0x00, 0x8C, 0x01, 0x00, 0x00, 0x14, 0x01,
- 0x00, 0x00, 0x8C, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xEE, 0xD7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03,
- 0x64, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xFE, 0xD8, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x14, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x5A, 0xD8, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01,
- 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x72, 0xD8,
- 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x64, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x82, 0xD9, 0xFF, 0xFF,
- 0x04, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xDE, 0xD8,
- 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
- 0x14, 0x00, 0x00, 0x00, 0xF6, 0xD8, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x54, 0x00, 0x00, 0x00, 0x04, 0x00,
- 0x00, 0x00, 0x06, 0xDA, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x52, 0xD9, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x6A, 0xD9, 0xFF, 0xFF, 0x00, 0x00,
- 0x00, 0x03, 0x14, 0x05, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x7A, 0xDA, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00,
- 0x40, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x86, 0xDE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0xA2, 0xDE,
- 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x64, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xB2, 0xDF, 0xFF, 0xFF,
- 0x04, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0xDF,
- 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
- 0x14, 0x00, 0x00, 0x00, 0x26, 0xDF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x64, 0x00, 0x00, 0x00, 0x04, 0x00,
- 0x00, 0x00, 0x36, 0xE0, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x92, 0xDF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0xAA, 0xDF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03,
- 0x14, 0x05, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xBA, 0xE0, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x40, 0x01,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0xC6, 0xE4, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0xE2, 0xE4, 0xFF, 0xFF,
- 0x00, 0x00, 0x00, 0x03, 0xA4, 0x01, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xF2, 0xE5, 0xFF, 0xFF, 0x04, 0x00,
- 0x00, 0x00, 0x64, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x8E, 0xE6, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01,
- 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x05, 0x00,
- 0x00, 0x00, 0xAA, 0xE6, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x64, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
- 0xBA, 0xE7, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x16, 0xE7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x01, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x2E, 0xE7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x64, 0x00,
- 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x3E, 0xE8, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x9A, 0xE7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0xB2, 0xE7, 0xFF, 0xFF,
- 0x00, 0x00, 0x00, 0x03, 0x64, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xC2, 0xE8, 0xFF, 0xFF, 0x04, 0x00,
- 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x1E, 0xE8, 0xFF, 0xFF,
- 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x14, 0x00,
- 0x00, 0x00, 0x36, 0xE8, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x14, 0x05, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
- 0x46, 0xE9, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x40, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x52, 0xED, 0xFF, 0xFF,
- 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00,
- 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x6E, 0xED, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x14, 0x05, 0x00, 0x00,
- 0x04, 0x00, 0x00, 0x00, 0x7E, 0xEE, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x40, 0x01, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x8A, 0xF2, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00,
- 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0xA6, 0xF2, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03,
- 0x14, 0x05, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xB6, 0xF3, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x40, 0x01,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0xC2, 0xF7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0xDE, 0xF7, 0xFF, 0xFF,
- 0x00, 0x00, 0x00, 0x03, 0xA4, 0x01, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xEE, 0xF8, 0xFF, 0xFF, 0x04, 0x00,
- 0x00, 0x00, 0x64, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x8A, 0xF9, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01,
- 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x05, 0x00,
- 0x00, 0x00, 0xA6, 0xF9, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0xA4, 0x01, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
- 0xB6, 0xFA, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x64, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x52, 0xFB,
- 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
- 0x14, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x6E, 0xFB, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0xA4, 0x01,
- 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x7E, 0xFC, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x64, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x1A, 0xFD, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x10, 0x00, 0x0C, 0x00,
- 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x05, 0x00, 0x06, 0x00, 0x07, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x01, 0x01, 0x04, 0x00, 0x00, 0x00, 0x2E, 0xFE, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,
- 0x22, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x6C, 0x73,
- 0x74, 0x6D, 0x00, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0xEC, 0x00, 0x00, 0x00, 0xD0, 0x00, 0x00, 0x00,
- 0xB4, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x88, 0x00, 0x00, 0x00, 0x5C, 0x00, 0x00, 0x00, 0x30, 0x00,
- 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x14, 0xFF, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
- 0xA6, 0xFD, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00,
- 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x3C, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00,
- 0x04, 0x00, 0x00, 0x00, 0xCE, 0xFD, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x64, 0xFF, 0xFF, 0xFF,
- 0x01, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xF6, 0xFD, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,
- 0xB4, 0xFE, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x1A, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x50, 0x00, 0x00, 0x00,
- 0xF0, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00,
- 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xE8, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x09, 0x04, 0x00, 0x00, 0x00,
- 0x7E, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00,
- 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x76, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00,
- 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
- 0x68, 0xFF, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0xCE, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,
- 0x08, 0x00, 0x0E, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x0C, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x08, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00,
- 0x08, 0x00, 0x0E, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x01, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x14, 0x00,
- 0x0E, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00,
- 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00,
- 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x6E, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x08, 0x00,
- 0x0C, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x04, 0x00, 0x00, 0x00,
- 0xF6, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x0A, 0x00, 0x04, 0x00, 0x06, 0x00,
- 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x14, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00,
- 0x0C, 0x00, 0x10, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00,
- 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00,
- 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x08, 0x00, 0x07, 0x00, 0x0C, 0x00,
- 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00,
- 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x00
- };
-
- armnn::INetworkPtr deserializedNetwork =
- DeserializeNetwork(std::string(lstmNoCifgWithPeepholeAndProjectionModel.begin(),
- lstmNoCifgWithPeepholeAndProjectionModel.end()));
-
- BOOST_CHECK(deserializedNetwork);
-
- // generating the same model parameters which where used to serialize the model (Layer norm is not specified)
- armnn::LstmDescriptor descriptor;
- descriptor.m_ActivationFunc = 4;
- descriptor.m_ClippingThresProj = 0.0f;
- descriptor.m_ClippingThresCell = 0.0f;
- descriptor.m_CifgEnabled = false;
- descriptor.m_ProjectionEnabled = true;
- descriptor.m_PeepholeEnabled = true;
-
- const uint32_t batchSize = 2u;
- const uint32_t inputSize = 5u;
- const uint32_t numUnits = 20u;
- const uint32_t outputSize = 16u;
-
- armnn::TensorInfo tensorInfo20x5({numUnits, inputSize}, armnn::DataType::Float32);
- std::vector<float> inputToInputWeightsData(tensorInfo20x5.GetNumElements(), 0.0f);
- armnn::ConstTensor inputToInputWeights(tensorInfo20x5, inputToInputWeightsData);
-
- std::vector<float> inputToForgetWeightsData(tensorInfo20x5.GetNumElements(), 0.0f);
- armnn::ConstTensor inputToForgetWeights(tensorInfo20x5, inputToForgetWeightsData);
-
- std::vector<float> inputToCellWeightsData(tensorInfo20x5.GetNumElements(), 0.0f);
- armnn::ConstTensor inputToCellWeights(tensorInfo20x5, inputToCellWeightsData);
-
- std::vector<float> inputToOutputWeightsData(tensorInfo20x5.GetNumElements(), 0.0f);
- armnn::ConstTensor inputToOutputWeights(tensorInfo20x5, inputToOutputWeightsData);
-
- armnn::TensorInfo tensorInfo20({numUnits}, armnn::DataType::Float32);
- std::vector<float> inputGateBiasData(tensorInfo20.GetNumElements(), 0.0f);
- armnn::ConstTensor inputGateBias(tensorInfo20, inputGateBiasData);
-
- std::vector<float> forgetGateBiasData(tensorInfo20.GetNumElements(), 0.0f);
- armnn::ConstTensor forgetGateBias(tensorInfo20, forgetGateBiasData);
-
- std::vector<float> cellBiasData(tensorInfo20.GetNumElements(), 0.0f);
- armnn::ConstTensor cellBias(tensorInfo20, cellBiasData);
-
- std::vector<float> outputGateBiasData(tensorInfo20.GetNumElements(), 0.0f);
- armnn::ConstTensor outputGateBias(tensorInfo20, outputGateBiasData);
-
- armnn::TensorInfo tensorInfo20x16({numUnits, outputSize}, armnn::DataType::Float32);
- std::vector<float> recurrentToInputWeightsData(tensorInfo20x16.GetNumElements(), 0.0f);
- armnn::ConstTensor recurrentToInputWeights(tensorInfo20x16, recurrentToInputWeightsData);
-
- std::vector<float> recurrentToForgetWeightsData(tensorInfo20x16.GetNumElements(), 0.0f);
- armnn::ConstTensor recurrentToForgetWeights(tensorInfo20x16, recurrentToForgetWeightsData);
-
- std::vector<float> recurrentToCellWeightsData(tensorInfo20x16.GetNumElements(), 0.0f);
- armnn::ConstTensor recurrentToCellWeights(tensorInfo20x16, recurrentToCellWeightsData);
-
- std::vector<float> recurrentToOutputWeightsData(tensorInfo20x16.GetNumElements(), 0.0f);
- armnn::ConstTensor recurrentToOutputWeights(tensorInfo20x16, recurrentToOutputWeightsData);
-
- std::vector<float> cellToInputWeightsData(tensorInfo20.GetNumElements(), 0.0f);
- armnn::ConstTensor cellToInputWeights(tensorInfo20, cellToInputWeightsData);
-
- std::vector<float> cellToForgetWeightsData(tensorInfo20.GetNumElements(), 0.0f);
- armnn::ConstTensor cellToForgetWeights(tensorInfo20, cellToForgetWeightsData);
-
- std::vector<float> cellToOutputWeightsData(tensorInfo20.GetNumElements(), 0.0f);
- armnn::ConstTensor cellToOutputWeights(tensorInfo20, cellToOutputWeightsData);
-
- armnn::TensorInfo tensorInfo16x20({outputSize, numUnits}, armnn::DataType::Float32);
- std::vector<float> projectionWeightsData(tensorInfo16x20.GetNumElements(), 0.0f);
- armnn::ConstTensor projectionWeights(tensorInfo16x20, projectionWeightsData);
-
- armnn::TensorInfo tensorInfo16({outputSize}, armnn::DataType::Float32);
- std::vector<float> projectionBiasData(outputSize, 0.0f);
- armnn::ConstTensor projectionBias(tensorInfo16, projectionBiasData);
-
- armnn::LstmInputParams params;
- params.m_InputToForgetWeights = &inputToForgetWeights;
- params.m_InputToCellWeights = &inputToCellWeights;
- params.m_InputToOutputWeights = &inputToOutputWeights;
- params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
- params.m_RecurrentToCellWeights = &recurrentToCellWeights;
- params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
- params.m_ForgetGateBias = &forgetGateBias;
- params.m_CellBias = &cellBias;
- params.m_OutputGateBias = &outputGateBias;
-
- // additional params because: descriptor.m_CifgEnabled = false
- params.m_InputToInputWeights = &inputToInputWeights;
- params.m_RecurrentToInputWeights = &recurrentToInputWeights;
- params.m_CellToInputWeights = &cellToInputWeights;
- params.m_InputGateBias = &inputGateBias;
-
- // additional params because: descriptor.m_ProjectionEnabled = true
- params.m_ProjectionWeights = &projectionWeights;
- params.m_ProjectionBias = &projectionBias;
-
- // additional params because: descriptor.m_PeepholeEnabled = true
- params.m_CellToForgetWeights = &cellToForgetWeights;
- params.m_CellToOutputWeights = &cellToOutputWeights;
-
- const std::string layerName("lstm");
- armnn::TensorInfo inputTensorInfo({ batchSize, inputSize }, armnn::DataType::Float32);
- armnn::TensorInfo cellStateTensorInfo({ batchSize, numUnits}, armnn::DataType::Float32);
- armnn::TensorInfo outputStateTensorInfo({ batchSize, outputSize }, armnn::DataType::Float32);
- armnn::TensorInfo lstmTensorInfoScratchBuff({ batchSize, numUnits * 4 }, armnn::DataType::Float32);
-
- VerifyLstmLayer checker(
- layerName,
- {inputTensorInfo, outputStateTensorInfo, cellStateTensorInfo},
- {lstmTensorInfoScratchBuff, outputStateTensorInfo, cellStateTensorInfo, outputStateTensorInfo},
- descriptor,
- params);
- deserializedNetwork->Accept(checker);
-}
-class VerifyQuantizedLstmLayer : public LayerVerifierBase
-{
-
-public:
- VerifyQuantizedLstmLayer(const std::string& layerName,
- const std::vector<armnn::TensorInfo>& inputInfos,
- const std::vector<armnn::TensorInfo>& outputInfos,
- const armnn::QuantizedLstmInputParams& inputParams)
- : LayerVerifierBase(layerName, inputInfos, outputInfos), m_InputParams(inputParams) {}
-
- void VisitQuantizedLstmLayer(const armnn::IConnectableLayer* layer,
- const armnn::QuantizedLstmInputParams& params,
- const char* name)
- {
- VerifyNameAndConnections(layer, name);
- VerifyInputParameters(params);
- }
-
-protected:
- void VerifyInputParameters(const armnn::QuantizedLstmInputParams& params)
- {
- VerifyConstTensors("m_InputToInputWeights",
- m_InputParams.m_InputToInputWeights, params.m_InputToInputWeights);
- VerifyConstTensors("m_InputToForgetWeights",
- m_InputParams.m_InputToForgetWeights, params.m_InputToForgetWeights);
- VerifyConstTensors("m_InputToCellWeights",
- m_InputParams.m_InputToCellWeights, params.m_InputToCellWeights);
- VerifyConstTensors("m_InputToOutputWeights",
- m_InputParams.m_InputToOutputWeights, params.m_InputToOutputWeights);
- VerifyConstTensors("m_RecurrentToInputWeights",
- m_InputParams.m_RecurrentToInputWeights, params.m_RecurrentToInputWeights);
- VerifyConstTensors("m_RecurrentToForgetWeights",
- m_InputParams.m_RecurrentToForgetWeights, params.m_RecurrentToForgetWeights);
- VerifyConstTensors("m_RecurrentToCellWeights",
- m_InputParams.m_RecurrentToCellWeights, params.m_RecurrentToCellWeights);
- VerifyConstTensors("m_RecurrentToOutputWeights",
- m_InputParams.m_RecurrentToOutputWeights, params.m_RecurrentToOutputWeights);
- VerifyConstTensors("m_InputGateBias",
- m_InputParams.m_InputGateBias, params.m_InputGateBias);
- VerifyConstTensors("m_ForgetGateBias",
- m_InputParams.m_ForgetGateBias, params.m_ForgetGateBias);
- VerifyConstTensors("m_CellBias",
- m_InputParams.m_CellBias, params.m_CellBias);
- VerifyConstTensors("m_OutputGateBias",
- m_InputParams.m_OutputGateBias, params.m_OutputGateBias);
- }
-
-private:
- armnn::QuantizedLstmInputParams m_InputParams;
-};
-
-BOOST_AUTO_TEST_CASE(SerializeDeserializeQuantizedLstm)
-{
- const uint32_t batchSize = 1;
- const uint32_t inputSize = 2;
- const uint32_t numUnits = 4;
- const uint32_t outputSize = numUnits;
-
- // Scale/Offset for input/output, cellState In/Out, weights, bias
- float inputOutputScale = 0.0078125f;
- int32_t inputOutputOffset = 128;
-
- float cellStateScale = 0.00048828125f;
- int32_t cellStateOffset = 0;
-
- float weightsScale = 0.00408021f;
- int32_t weightsOffset = 100;
-
- float biasScale = 3.1876640625e-05f;
- int32_t biasOffset = 0;
-
- // The shape of weight data is {outputSize, inputSize} = {4, 2}
- armnn::TensorShape inputToInputWeightsShape = {4, 2};
- std::vector<uint8_t> inputToInputWeightsData = {1, 2, 3, 4, 5, 6, 7, 8};
- armnn::TensorInfo inputToInputWeightsInfo(inputToInputWeightsShape,
- armnn::DataType::QAsymmU8,
- weightsScale,
- weightsOffset);
- armnn::ConstTensor inputToInputWeights(inputToInputWeightsInfo, inputToInputWeightsData);
-
- armnn::TensorShape inputToForgetWeightsShape = {4, 2};
- std::vector<uint8_t> inputToForgetWeightsData = {1, 2, 3, 4, 5, 6, 7, 8};
- armnn::TensorInfo inputToForgetWeightsInfo(inputToForgetWeightsShape,
- armnn::DataType::QAsymmU8,
- weightsScale,
- weightsOffset);
- armnn::ConstTensor inputToForgetWeights(inputToForgetWeightsInfo, inputToForgetWeightsData);
-
- armnn::TensorShape inputToCellWeightsShape = {4, 2};
- std::vector<uint8_t> inputToCellWeightsData = {1, 2, 3, 4, 5, 6, 7, 8};
- armnn::TensorInfo inputToCellWeightsInfo(inputToCellWeightsShape,
- armnn::DataType::QAsymmU8,
- weightsScale,
- weightsOffset);
- armnn::ConstTensor inputToCellWeights(inputToCellWeightsInfo, inputToCellWeightsData);
-
- armnn::TensorShape inputToOutputWeightsShape = {4, 2};
- std::vector<uint8_t> inputToOutputWeightsData = {1, 2, 3, 4, 5, 6, 7, 8};
- armnn::TensorInfo inputToOutputWeightsInfo(inputToOutputWeightsShape,
- armnn::DataType::QAsymmU8,
- weightsScale,
- weightsOffset);
- armnn::ConstTensor inputToOutputWeights(inputToOutputWeightsInfo, inputToOutputWeightsData);
-
- // The shape of recurrent weight data is {outputSize, outputSize} = {4, 4}
- armnn::TensorShape recurrentToInputWeightsShape = {4, 4};
- std::vector<uint8_t> recurrentToInputWeightsData = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};
- armnn::TensorInfo recurrentToInputWeightsInfo(recurrentToInputWeightsShape,
- armnn::DataType::QAsymmU8,
- weightsScale,
- weightsOffset);
- armnn::ConstTensor recurrentToInputWeights(recurrentToInputWeightsInfo, recurrentToInputWeightsData);
-
- armnn::TensorShape recurrentToForgetWeightsShape = {4, 4};
- std::vector<uint8_t> recurrentToForgetWeightsData = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};
- armnn::TensorInfo recurrentToForgetWeightsInfo(recurrentToForgetWeightsShape,
- armnn::DataType::QAsymmU8,
- weightsScale,
- weightsOffset);
- armnn::ConstTensor recurrentToForgetWeights(recurrentToForgetWeightsInfo, recurrentToForgetWeightsData);
-
- armnn::TensorShape recurrentToCellWeightsShape = {4, 4};
- std::vector<uint8_t> recurrentToCellWeightsData = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};
- armnn::TensorInfo recurrentToCellWeightsInfo(recurrentToCellWeightsShape,
- armnn::DataType::QAsymmU8,
- weightsScale,
- weightsOffset);
- armnn::ConstTensor recurrentToCellWeights(recurrentToCellWeightsInfo, recurrentToCellWeightsData);
-
- armnn::TensorShape recurrentToOutputWeightsShape = {4, 4};
- std::vector<uint8_t> recurrentToOutputWeightsData = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};
- armnn::TensorInfo recurrentToOutputWeightsInfo(recurrentToOutputWeightsShape,
- armnn::DataType::QAsymmU8,
- weightsScale,
- weightsOffset);
- armnn::ConstTensor recurrentToOutputWeights(recurrentToOutputWeightsInfo, recurrentToOutputWeightsData);
-
- // The shape of bias data is {outputSize} = {4}
- armnn::TensorShape inputGateBiasShape = {4};
- std::vector<int32_t> inputGateBiasData = {1, 2, 3, 4};
- armnn::TensorInfo inputGateBiasInfo(inputGateBiasShape,
- armnn::DataType::Signed32,
- biasScale,
- biasOffset);
- armnn::ConstTensor inputGateBias(inputGateBiasInfo, inputGateBiasData);
-
- armnn::TensorShape forgetGateBiasShape = {4};
- std::vector<int32_t> forgetGateBiasData = {1, 2, 3, 4};
- armnn::TensorInfo forgetGateBiasInfo(forgetGateBiasShape,
- armnn::DataType::Signed32,
- biasScale,
- biasOffset);
- armnn::ConstTensor forgetGateBias(forgetGateBiasInfo, forgetGateBiasData);
-
- armnn::TensorShape cellBiasShape = {4};
- std::vector<int32_t> cellBiasData = {1, 2, 3, 4};
- armnn::TensorInfo cellBiasInfo(cellBiasShape,
- armnn::DataType::Signed32,
- biasScale,
- biasOffset);
- armnn::ConstTensor cellBias(cellBiasInfo, cellBiasData);
-
- armnn::TensorShape outputGateBiasShape = {4};
- std::vector<int32_t> outputGateBiasData = {1, 2, 3, 4};
- armnn::TensorInfo outputGateBiasInfo(outputGateBiasShape,
- armnn::DataType::Signed32,
- biasScale,
- biasOffset);
- armnn::ConstTensor outputGateBias(outputGateBiasInfo, outputGateBiasData);
-
- armnn::QuantizedLstmInputParams params;
- params.m_InputToInputWeights = &inputToInputWeights;
- params.m_InputToForgetWeights = &inputToForgetWeights;
- params.m_InputToCellWeights = &inputToCellWeights;
- params.m_InputToOutputWeights = &inputToOutputWeights;
- params.m_RecurrentToInputWeights = &recurrentToInputWeights;
- params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
- params.m_RecurrentToCellWeights = &recurrentToCellWeights;
- params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
- params.m_InputGateBias = &inputGateBias;
- params.m_ForgetGateBias = &forgetGateBias;
- params.m_CellBias = &cellBias;
- params.m_OutputGateBias = &outputGateBias;
-
- armnn::INetworkPtr network = armnn::INetwork::Create();
- armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
- armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(1);
- armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(2);
- const std::string layerName("QuantizedLstm");
- armnn::IConnectableLayer* const quantizedLstmLayer = network->AddQuantizedLstmLayer(params, layerName.c_str());
- armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(0);
- armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(1);
-
- // Connect up
- armnn::TensorInfo inputTensorInfo({ batchSize, inputSize },
- armnn::DataType::QAsymmU8,
- inputOutputScale,
- inputOutputOffset);
- armnn::TensorInfo cellStateTensorInfo({ batchSize, numUnits },
- armnn::DataType::QSymmS16,
- cellStateScale,
- cellStateOffset);
- armnn::TensorInfo outputStateTensorInfo({ batchSize, outputSize },
- armnn::DataType::QAsymmU8,
- inputOutputScale,
- inputOutputOffset);
-
- inputLayer->GetOutputSlot(0).Connect(quantizedLstmLayer->GetInputSlot(0));
- inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
-
- cellStateIn->GetOutputSlot(0).Connect(quantizedLstmLayer->GetInputSlot(1));
- cellStateIn->GetOutputSlot(0).SetTensorInfo(cellStateTensorInfo);
-
- outputStateIn->GetOutputSlot(0).Connect(quantizedLstmLayer->GetInputSlot(2));
- outputStateIn->GetOutputSlot(0).SetTensorInfo(outputStateTensorInfo);
-
- quantizedLstmLayer->GetOutputSlot(0).Connect(cellStateOut->GetInputSlot(0));
- quantizedLstmLayer->GetOutputSlot(0).SetTensorInfo(cellStateTensorInfo);
-
- quantizedLstmLayer->GetOutputSlot(1).Connect(outputLayer->GetInputSlot(0));
- quantizedLstmLayer->GetOutputSlot(1).SetTensorInfo(outputStateTensorInfo);
-
- armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
- BOOST_CHECK(deserializedNetwork);
-
- VerifyQuantizedLstmLayer checker(layerName,
- {inputTensorInfo, cellStateTensorInfo, outputStateTensorInfo},
- {cellStateTensorInfo, outputStateTensorInfo},
- params);
-
- deserializedNetwork->Accept(checker);
-}
-
-class VerifyQLstmLayer : public LayerVerifierBaseWithDescriptor<armnn::QLstmDescriptor>
-{
-public:
- VerifyQLstmLayer(const std::string& layerName,
- const std::vector<armnn::TensorInfo>& inputInfos,
- const std::vector<armnn::TensorInfo>& outputInfos,
- const armnn::QLstmDescriptor& descriptor,
- const armnn::LstmInputParams& inputParams)
- : LayerVerifierBaseWithDescriptor<armnn::QLstmDescriptor>(layerName, inputInfos, outputInfos, descriptor)
- , m_InputParams(inputParams) {}
-
- void VisitQLstmLayer(const armnn::IConnectableLayer* layer,
- const armnn::QLstmDescriptor& descriptor,
- const armnn::LstmInputParams& params,
- const char* name)
- {
- VerifyNameAndConnections(layer, name);
- VerifyDescriptor(descriptor);
- VerifyInputParameters(params);
- }
-
-protected:
- void VerifyInputParameters(const armnn::LstmInputParams& params)
- {
- VerifyConstTensors(
- "m_InputToInputWeights", m_InputParams.m_InputToInputWeights, params.m_InputToInputWeights);
- VerifyConstTensors(
- "m_InputToForgetWeights", m_InputParams.m_InputToForgetWeights, params.m_InputToForgetWeights);
- VerifyConstTensors(
- "m_InputToCellWeights", m_InputParams.m_InputToCellWeights, params.m_InputToCellWeights);
- VerifyConstTensors(
- "m_InputToOutputWeights", m_InputParams.m_InputToOutputWeights, params.m_InputToOutputWeights);
- VerifyConstTensors(
- "m_RecurrentToInputWeights", m_InputParams.m_RecurrentToInputWeights, params.m_RecurrentToInputWeights);
- VerifyConstTensors(
- "m_RecurrentToForgetWeights", m_InputParams.m_RecurrentToForgetWeights, params.m_RecurrentToForgetWeights);
- VerifyConstTensors(
- "m_RecurrentToCellWeights", m_InputParams.m_RecurrentToCellWeights, params.m_RecurrentToCellWeights);
- VerifyConstTensors(
- "m_RecurrentToOutputWeights", m_InputParams.m_RecurrentToOutputWeights, params.m_RecurrentToOutputWeights);
- VerifyConstTensors(
- "m_CellToInputWeights", m_InputParams.m_CellToInputWeights, params.m_CellToInputWeights);
- VerifyConstTensors(
- "m_CellToForgetWeights", m_InputParams.m_CellToForgetWeights, params.m_CellToForgetWeights);
- VerifyConstTensors(
- "m_CellToOutputWeights", m_InputParams.m_CellToOutputWeights, params.m_CellToOutputWeights);
- VerifyConstTensors(
- "m_InputGateBias", m_InputParams.m_InputGateBias, params.m_InputGateBias);
- VerifyConstTensors(
- "m_ForgetGateBias", m_InputParams.m_ForgetGateBias, params.m_ForgetGateBias);
- VerifyConstTensors(
- "m_CellBias", m_InputParams.m_CellBias, params.m_CellBias);
- VerifyConstTensors(
- "m_OutputGateBias", m_InputParams.m_OutputGateBias, params.m_OutputGateBias);
- VerifyConstTensors(
- "m_ProjectionWeights", m_InputParams.m_ProjectionWeights, params.m_ProjectionWeights);
- VerifyConstTensors(
- "m_ProjectionBias", m_InputParams.m_ProjectionBias, params.m_ProjectionBias);
- VerifyConstTensors(
- "m_InputLayerNormWeights", m_InputParams.m_InputLayerNormWeights, params.m_InputLayerNormWeights);
- VerifyConstTensors(
- "m_ForgetLayerNormWeights", m_InputParams.m_ForgetLayerNormWeights, params.m_ForgetLayerNormWeights);
- VerifyConstTensors(
- "m_CellLayerNormWeights", m_InputParams.m_CellLayerNormWeights, params.m_CellLayerNormWeights);
- VerifyConstTensors(
- "m_OutputLayerNormWeights", m_InputParams.m_OutputLayerNormWeights, params.m_OutputLayerNormWeights);
- }
-
-private:
- armnn::LstmInputParams m_InputParams;
-};
-
-BOOST_AUTO_TEST_CASE(SerializeDeserializeQLstmBasic)
-{
- armnn::QLstmDescriptor descriptor;
-
- descriptor.m_CifgEnabled = true;
- descriptor.m_ProjectionEnabled = false;
- descriptor.m_PeepholeEnabled = false;
- descriptor.m_LayerNormEnabled = false;
-
- descriptor.m_CellClip = 0.0f;
- descriptor.m_ProjectionClip = 0.0f;
-
- descriptor.m_InputIntermediateScale = 0.00001f;
- descriptor.m_ForgetIntermediateScale = 0.00001f;
- descriptor.m_CellIntermediateScale = 0.00001f;
- descriptor.m_OutputIntermediateScale = 0.00001f;
-
- descriptor.m_HiddenStateScale = 0.07f;
- descriptor.m_HiddenStateZeroPoint = 0;
-
- const unsigned int numBatches = 2;
- const unsigned int inputSize = 5;
- const unsigned int outputSize = 4;
- const unsigned int numUnits = 4;
-
- // Scale/Offset quantization info
- float inputScale = 0.0078f;
- int32_t inputOffset = 0;
-
- float outputScale = 0.0078f;
- int32_t outputOffset = 0;
-
- float cellStateScale = 3.5002e-05f;
- int32_t cellStateOffset = 0;
-
- float weightsScale = 0.007f;
- int32_t weightsOffset = 0;
-
- float biasScale = 3.5002e-05f / 1024;
- int32_t biasOffset = 0;
-
- // Weights and bias tensor and quantization info
- armnn::TensorInfo inputWeightsInfo({numUnits, inputSize},
- armnn::DataType::QSymmS8,
- weightsScale,
- weightsOffset);
-
- armnn::TensorInfo recurrentWeightsInfo({numUnits, outputSize},
- armnn::DataType::QSymmS8,
- weightsScale,
- weightsOffset);
-
- armnn::TensorInfo biasInfo({numUnits}, armnn::DataType::Signed32, biasScale, biasOffset);
-
- std::vector<int8_t> inputToForgetWeightsData = GenerateRandomData<int8_t>(inputWeightsInfo.GetNumElements());
- std::vector<int8_t> inputToCellWeightsData = GenerateRandomData<int8_t>(inputWeightsInfo.GetNumElements());
- std::vector<int8_t> inputToOutputWeightsData = GenerateRandomData<int8_t>(inputWeightsInfo.GetNumElements());
-
- armnn::ConstTensor inputToForgetWeights(inputWeightsInfo, inputToForgetWeightsData);
- armnn::ConstTensor inputToCellWeights(inputWeightsInfo, inputToCellWeightsData);
- armnn::ConstTensor inputToOutputWeights(inputWeightsInfo, inputToOutputWeightsData);
-
- std::vector<int8_t> recurrentToForgetWeightsData =
- GenerateRandomData<int8_t>(recurrentWeightsInfo.GetNumElements());
- std::vector<int8_t> recurrentToCellWeightsData =
- GenerateRandomData<int8_t>(recurrentWeightsInfo.GetNumElements());
- std::vector<int8_t> recurrentToOutputWeightsData =
- GenerateRandomData<int8_t>(recurrentWeightsInfo.GetNumElements());
-
- armnn::ConstTensor recurrentToForgetWeights(recurrentWeightsInfo, recurrentToForgetWeightsData);
- armnn::ConstTensor recurrentToCellWeights(recurrentWeightsInfo, recurrentToCellWeightsData);
- armnn::ConstTensor recurrentToOutputWeights(recurrentWeightsInfo, recurrentToOutputWeightsData);
-
- std::vector<int32_t> forgetGateBiasData(numUnits, 1);
- std::vector<int32_t> cellBiasData(numUnits, 0);
- std::vector<int32_t> outputGateBiasData(numUnits, 0);
-
- armnn::ConstTensor forgetGateBias(biasInfo, forgetGateBiasData);
- armnn::ConstTensor cellBias(biasInfo, cellBiasData);
- armnn::ConstTensor outputGateBias(biasInfo, outputGateBiasData);
-
- // Set up params
- armnn::LstmInputParams params;
- params.m_InputToForgetWeights = &inputToForgetWeights;
- params.m_InputToCellWeights = &inputToCellWeights;
- params.m_InputToOutputWeights = &inputToOutputWeights;
-
- params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
- params.m_RecurrentToCellWeights = &recurrentToCellWeights;
- params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
-
- params.m_ForgetGateBias = &forgetGateBias;
- params.m_CellBias = &cellBias;
- params.m_OutputGateBias = &outputGateBias;
-
- // Create network
- armnn::INetworkPtr network = armnn::INetwork::Create();
- const std::string layerName("qLstm");
-
- armnn::IConnectableLayer* const input = network->AddInputLayer(0);
- armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(1);
- armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(2);
-
- armnn::IConnectableLayer* const qLstmLayer = network->AddQLstmLayer(descriptor, params, layerName.c_str());
-
- armnn::IConnectableLayer* const outputStateOut = network->AddOutputLayer(0);
- armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(1);
- armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(2);
-
- // Input/Output tensor info
- armnn::TensorInfo inputInfo({numBatches , inputSize},
- armnn::DataType::QAsymmS8,
- inputScale,
- inputOffset);
-
- armnn::TensorInfo cellStateInfo({numBatches , numUnits},
- armnn::DataType::QSymmS16,
- cellStateScale,
- cellStateOffset);
-
- armnn::TensorInfo outputStateInfo({numBatches , outputSize},
- armnn::DataType::QAsymmS8,
- outputScale,
- outputOffset);
-
- // Connect input/output slots
- input->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(0));
- input->GetOutputSlot(0).SetTensorInfo(inputInfo);
-
- outputStateIn->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(1));
- outputStateIn->GetOutputSlot(0).SetTensorInfo(cellStateInfo);
-
- cellStateIn->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(2));
- cellStateIn->GetOutputSlot(0).SetTensorInfo(outputStateInfo);
-
- qLstmLayer->GetOutputSlot(0).Connect(outputStateOut->GetInputSlot(0));
- qLstmLayer->GetOutputSlot(0).SetTensorInfo(outputStateInfo);
-
- qLstmLayer->GetOutputSlot(1).Connect(cellStateOut->GetInputSlot(0));
- qLstmLayer->GetOutputSlot(1).SetTensorInfo(cellStateInfo);
-
- qLstmLayer->GetOutputSlot(2).Connect(outputLayer->GetInputSlot(0));
- qLstmLayer->GetOutputSlot(2).SetTensorInfo(outputStateInfo);
-
- armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
- BOOST_CHECK(deserializedNetwork);
-
- VerifyQLstmLayer checker(layerName,
- {inputInfo, cellStateInfo, outputStateInfo},
- {outputStateInfo, cellStateInfo, outputStateInfo},
- descriptor,
- params);
-
- deserializedNetwork->Accept(checker);
-}
-
-BOOST_AUTO_TEST_CASE(SerializeDeserializeQLstmCifgLayerNorm)
-{
- armnn::QLstmDescriptor descriptor;
-
- // CIFG params are used when CIFG is disabled
- descriptor.m_CifgEnabled = true;
- descriptor.m_ProjectionEnabled = false;
- descriptor.m_PeepholeEnabled = false;
- descriptor.m_LayerNormEnabled = true;
-
- descriptor.m_CellClip = 0.0f;
- descriptor.m_ProjectionClip = 0.0f;
-
- descriptor.m_InputIntermediateScale = 0.00001f;
- descriptor.m_ForgetIntermediateScale = 0.00001f;
- descriptor.m_CellIntermediateScale = 0.00001f;
- descriptor.m_OutputIntermediateScale = 0.00001f;
-
- descriptor.m_HiddenStateScale = 0.07f;
- descriptor.m_HiddenStateZeroPoint = 0;
-
- const unsigned int numBatches = 2;
- const unsigned int inputSize = 5;
- const unsigned int outputSize = 4;
- const unsigned int numUnits = 4;
-
- // Scale/Offset quantization info
- float inputScale = 0.0078f;
- int32_t inputOffset = 0;
-
- float outputScale = 0.0078f;
- int32_t outputOffset = 0;
-
- float cellStateScale = 3.5002e-05f;
- int32_t cellStateOffset = 0;
-
- float weightsScale = 0.007f;
- int32_t weightsOffset = 0;
-
- float layerNormScale = 3.5002e-05f;
- int32_t layerNormOffset = 0;
-
- float biasScale = layerNormScale / 1024;
- int32_t biasOffset = 0;
-
- // Weights and bias tensor and quantization info
- armnn::TensorInfo inputWeightsInfo({numUnits, inputSize},
- armnn::DataType::QSymmS8,
- weightsScale,
- weightsOffset);
-
- armnn::TensorInfo recurrentWeightsInfo({numUnits, outputSize},
- armnn::DataType::QSymmS8,
- weightsScale,
- weightsOffset);
-
- armnn::TensorInfo biasInfo({numUnits},
- armnn::DataType::Signed32,
- biasScale,
- biasOffset);
-
- armnn::TensorInfo layerNormWeightsInfo({numUnits},
- armnn::DataType::QSymmS16,
- layerNormScale,
- layerNormOffset);
-
- // Mandatory params
- std::vector<int8_t> inputToForgetWeightsData = GenerateRandomData<int8_t>(inputWeightsInfo.GetNumElements());
- std::vector<int8_t> inputToCellWeightsData = GenerateRandomData<int8_t>(inputWeightsInfo.GetNumElements());
- std::vector<int8_t> inputToOutputWeightsData = GenerateRandomData<int8_t>(inputWeightsInfo.GetNumElements());
-
- armnn::ConstTensor inputToForgetWeights(inputWeightsInfo, inputToForgetWeightsData);
- armnn::ConstTensor inputToCellWeights(inputWeightsInfo, inputToCellWeightsData);
- armnn::ConstTensor inputToOutputWeights(inputWeightsInfo, inputToOutputWeightsData);
-
- std::vector<int8_t> recurrentToForgetWeightsData =
- GenerateRandomData<int8_t>(recurrentWeightsInfo.GetNumElements());
- std::vector<int8_t> recurrentToCellWeightsData =
- GenerateRandomData<int8_t>(recurrentWeightsInfo.GetNumElements());
- std::vector<int8_t> recurrentToOutputWeightsData =
- GenerateRandomData<int8_t>(recurrentWeightsInfo.GetNumElements());
-
- armnn::ConstTensor recurrentToForgetWeights(recurrentWeightsInfo, recurrentToForgetWeightsData);
- armnn::ConstTensor recurrentToCellWeights(recurrentWeightsInfo, recurrentToCellWeightsData);
- armnn::ConstTensor recurrentToOutputWeights(recurrentWeightsInfo, recurrentToOutputWeightsData);
-
- std::vector<int32_t> forgetGateBiasData(numUnits, 1);
- std::vector<int32_t> cellBiasData(numUnits, 0);
- std::vector<int32_t> outputGateBiasData(numUnits, 0);
-
- armnn::ConstTensor forgetGateBias(biasInfo, forgetGateBiasData);
- armnn::ConstTensor cellBias(biasInfo, cellBiasData);
- armnn::ConstTensor outputGateBias(biasInfo, outputGateBiasData);
-
- // Layer Norm
- std::vector<int16_t> forgetLayerNormWeightsData =
- GenerateRandomData<int16_t>(layerNormWeightsInfo.GetNumElements());
- std::vector<int16_t> cellLayerNormWeightsData =
- GenerateRandomData<int16_t>(layerNormWeightsInfo.GetNumElements());
- std::vector<int16_t> outputLayerNormWeightsData =
- GenerateRandomData<int16_t>(layerNormWeightsInfo.GetNumElements());
-
- armnn::ConstTensor forgetLayerNormWeights(layerNormWeightsInfo, forgetLayerNormWeightsData);
- armnn::ConstTensor cellLayerNormWeights(layerNormWeightsInfo, cellLayerNormWeightsData);
- armnn::ConstTensor outputLayerNormWeights(layerNormWeightsInfo, outputLayerNormWeightsData);
-
- // Set up params
- armnn::LstmInputParams params;
-
- // Mandatory params
- params.m_InputToForgetWeights = &inputToForgetWeights;
- params.m_InputToCellWeights = &inputToCellWeights;
- params.m_InputToOutputWeights = &inputToOutputWeights;
-
- params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
- params.m_RecurrentToCellWeights = &recurrentToCellWeights;
- params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
-
- params.m_ForgetGateBias = &forgetGateBias;
- params.m_CellBias = &cellBias;
- params.m_OutputGateBias = &outputGateBias;
-
- // Layer Norm
- params.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
- params.m_CellLayerNormWeights = &cellLayerNormWeights;
- params.m_OutputLayerNormWeights = &outputLayerNormWeights;
-
- // Create network
- armnn::INetworkPtr network = armnn::INetwork::Create();
- const std::string layerName("qLstm");
-
- armnn::IConnectableLayer* const input = network->AddInputLayer(0);
- armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(1);
- armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(2);
-
- armnn::IConnectableLayer* const qLstmLayer = network->AddQLstmLayer(descriptor, params, layerName.c_str());
-
- armnn::IConnectableLayer* const outputStateOut = network->AddOutputLayer(0);
- armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(1);
- armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(2);
-
- // Input/Output tensor info
- armnn::TensorInfo inputInfo({numBatches , inputSize},
- armnn::DataType::QAsymmS8,
- inputScale,
- inputOffset);
-
- armnn::TensorInfo cellStateInfo({numBatches , numUnits},
- armnn::DataType::QSymmS16,
- cellStateScale,
- cellStateOffset);
-
- armnn::TensorInfo outputStateInfo({numBatches , outputSize},
- armnn::DataType::QAsymmS8,
- outputScale,
- outputOffset);
-
- // Connect input/output slots
- input->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(0));
- input->GetOutputSlot(0).SetTensorInfo(inputInfo);
-
- outputStateIn->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(1));
- outputStateIn->GetOutputSlot(0).SetTensorInfo(cellStateInfo);
-
- cellStateIn->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(2));
- cellStateIn->GetOutputSlot(0).SetTensorInfo(outputStateInfo);
-
- qLstmLayer->GetOutputSlot(0).Connect(outputStateOut->GetInputSlot(0));
- qLstmLayer->GetOutputSlot(0).SetTensorInfo(outputStateInfo);
-
- qLstmLayer->GetOutputSlot(1).Connect(cellStateOut->GetInputSlot(0));
- qLstmLayer->GetOutputSlot(1).SetTensorInfo(cellStateInfo);
-
- qLstmLayer->GetOutputSlot(2).Connect(outputLayer->GetInputSlot(0));
- qLstmLayer->GetOutputSlot(2).SetTensorInfo(outputStateInfo);
-
- armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
- BOOST_CHECK(deserializedNetwork);
-
- VerifyQLstmLayer checker(layerName,
- {inputInfo, cellStateInfo, outputStateInfo},
- {outputStateInfo, cellStateInfo, outputStateInfo},
- descriptor,
- params);
-
- deserializedNetwork->Accept(checker);
-}
-
-BOOST_AUTO_TEST_CASE(SerializeDeserializeQLstmAdvanced)
-{
- armnn::QLstmDescriptor descriptor;
-
- descriptor.m_CifgEnabled = false;
- descriptor.m_ProjectionEnabled = true;
- descriptor.m_PeepholeEnabled = true;
- descriptor.m_LayerNormEnabled = true;
-
- descriptor.m_CellClip = 0.1f;
- descriptor.m_ProjectionClip = 0.1f;
-
- descriptor.m_InputIntermediateScale = 0.00001f;
- descriptor.m_ForgetIntermediateScale = 0.00001f;
- descriptor.m_CellIntermediateScale = 0.00001f;
- descriptor.m_OutputIntermediateScale = 0.00001f;
-
- descriptor.m_HiddenStateScale = 0.07f;
- descriptor.m_HiddenStateZeroPoint = 0;
-
- const unsigned int numBatches = 2;
- const unsigned int inputSize = 5;
- const unsigned int outputSize = 4;
- const unsigned int numUnits = 4;
-
- // Scale/Offset quantization info
- float inputScale = 0.0078f;
- int32_t inputOffset = 0;
-
- float outputScale = 0.0078f;
- int32_t outputOffset = 0;
-
- float cellStateScale = 3.5002e-05f;
- int32_t cellStateOffset = 0;
-
- float weightsScale = 0.007f;
- int32_t weightsOffset = 0;
-
- float layerNormScale = 3.5002e-05f;
- int32_t layerNormOffset = 0;
-
- float biasScale = layerNormScale / 1024;
- int32_t biasOffset = 0;
-
- // Weights and bias tensor and quantization info
- armnn::TensorInfo inputWeightsInfo({numUnits, inputSize},
- armnn::DataType::QSymmS8,
- weightsScale,
- weightsOffset);
-
- armnn::TensorInfo recurrentWeightsInfo({numUnits, outputSize},
- armnn::DataType::QSymmS8,
- weightsScale,
- weightsOffset);
-
- armnn::TensorInfo biasInfo({numUnits},
- armnn::DataType::Signed32,
- biasScale,
- biasOffset);
-
- armnn::TensorInfo peepholeWeightsInfo({numUnits},
- armnn::DataType::QSymmS16,
- weightsScale,
- weightsOffset);
-
- armnn::TensorInfo layerNormWeightsInfo({numUnits},
- armnn::DataType::QSymmS16,
- layerNormScale,
- layerNormOffset);
-
- armnn::TensorInfo projectionWeightsInfo({outputSize, numUnits},
- armnn::DataType::QSymmS8,
- weightsScale,
- weightsOffset);
-
- // Mandatory params
- std::vector<int8_t> inputToForgetWeightsData = GenerateRandomData<int8_t>(inputWeightsInfo.GetNumElements());
- std::vector<int8_t> inputToCellWeightsData = GenerateRandomData<int8_t>(inputWeightsInfo.GetNumElements());
- std::vector<int8_t> inputToOutputWeightsData = GenerateRandomData<int8_t>(inputWeightsInfo.GetNumElements());
-
- armnn::ConstTensor inputToForgetWeights(inputWeightsInfo, inputToForgetWeightsData);
- armnn::ConstTensor inputToCellWeights(inputWeightsInfo, inputToCellWeightsData);
- armnn::ConstTensor inputToOutputWeights(inputWeightsInfo, inputToOutputWeightsData);
-
- std::vector<int8_t> recurrentToForgetWeightsData =
- GenerateRandomData<int8_t>(recurrentWeightsInfo.GetNumElements());
- std::vector<int8_t> recurrentToCellWeightsData =
- GenerateRandomData<int8_t>(recurrentWeightsInfo.GetNumElements());
- std::vector<int8_t> recurrentToOutputWeightsData =
- GenerateRandomData<int8_t>(recurrentWeightsInfo.GetNumElements());
-
- armnn::ConstTensor recurrentToForgetWeights(recurrentWeightsInfo, recurrentToForgetWeightsData);
- armnn::ConstTensor recurrentToCellWeights(recurrentWeightsInfo, recurrentToCellWeightsData);
- armnn::ConstTensor recurrentToOutputWeights(recurrentWeightsInfo, recurrentToOutputWeightsData);
-
- std::vector<int32_t> forgetGateBiasData(numUnits, 1);
- std::vector<int32_t> cellBiasData(numUnits, 0);
- std::vector<int32_t> outputGateBiasData(numUnits, 0);
-
- armnn::ConstTensor forgetGateBias(biasInfo, forgetGateBiasData);
- armnn::ConstTensor cellBias(biasInfo, cellBiasData);
- armnn::ConstTensor outputGateBias(biasInfo, outputGateBiasData);
-
- // CIFG
- std::vector<int8_t> inputToInputWeightsData = GenerateRandomData<int8_t>(inputWeightsInfo.GetNumElements());
- std::vector<int8_t> recurrentToInputWeightsData =
- GenerateRandomData<int8_t>(recurrentWeightsInfo.GetNumElements());
- std::vector<int32_t> inputGateBiasData(numUnits, 1);
-
- armnn::ConstTensor inputToInputWeights(inputWeightsInfo, inputToInputWeightsData);
- armnn::ConstTensor recurrentToInputWeights(recurrentWeightsInfo, recurrentToInputWeightsData);
- armnn::ConstTensor inputGateBias(biasInfo, inputGateBiasData);
-
- // Peephole
- std::vector<int16_t> cellToInputWeightsData = GenerateRandomData<int16_t>(peepholeWeightsInfo.GetNumElements());
- std::vector<int16_t> cellToForgetWeightsData = GenerateRandomData<int16_t>(peepholeWeightsInfo.GetNumElements());
- std::vector<int16_t> cellToOutputWeightsData = GenerateRandomData<int16_t>(peepholeWeightsInfo.GetNumElements());
-
- armnn::ConstTensor cellToInputWeights(peepholeWeightsInfo, cellToInputWeightsData);
- armnn::ConstTensor cellToForgetWeights(peepholeWeightsInfo, cellToForgetWeightsData);
- armnn::ConstTensor cellToOutputWeights(peepholeWeightsInfo, cellToOutputWeightsData);
-
- // Projection
- std::vector<int8_t> projectionWeightsData = GenerateRandomData<int8_t>(projectionWeightsInfo.GetNumElements());
- std::vector<int32_t> projectionBiasData(outputSize, 1);
-
- armnn::ConstTensor projectionWeights(projectionWeightsInfo, projectionWeightsData);
- armnn::ConstTensor projectionBias(biasInfo, projectionBiasData);
-
- // Layer Norm
- std::vector<int16_t> inputLayerNormWeightsData =
- GenerateRandomData<int16_t>(layerNormWeightsInfo.GetNumElements());
- std::vector<int16_t> forgetLayerNormWeightsData =
- GenerateRandomData<int16_t>(layerNormWeightsInfo.GetNumElements());
- std::vector<int16_t> cellLayerNormWeightsData =
- GenerateRandomData<int16_t>(layerNormWeightsInfo.GetNumElements());
- std::vector<int16_t> outputLayerNormWeightsData =
- GenerateRandomData<int16_t>(layerNormWeightsInfo.GetNumElements());
-
- armnn::ConstTensor inputLayerNormWeights(layerNormWeightsInfo, inputLayerNormWeightsData);
- armnn::ConstTensor forgetLayerNormWeights(layerNormWeightsInfo, forgetLayerNormWeightsData);
- armnn::ConstTensor cellLayerNormWeights(layerNormWeightsInfo, cellLayerNormWeightsData);
- armnn::ConstTensor outputLayerNormWeights(layerNormWeightsInfo, outputLayerNormWeightsData);
-
- // Set up params
- armnn::LstmInputParams params;
-
- // Mandatory params
- params.m_InputToForgetWeights = &inputToForgetWeights;
- params.m_InputToCellWeights = &inputToCellWeights;
- params.m_InputToOutputWeights = &inputToOutputWeights;
-
- params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
- params.m_RecurrentToCellWeights = &recurrentToCellWeights;
- params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
-
- params.m_ForgetGateBias = &forgetGateBias;
- params.m_CellBias = &cellBias;
- params.m_OutputGateBias = &outputGateBias;
-
- // CIFG
- params.m_InputToInputWeights = &inputToInputWeights;
- params.m_RecurrentToInputWeights = &recurrentToInputWeights;
- params.m_InputGateBias = &inputGateBias;
-
- // Peephole
- params.m_CellToInputWeights = &cellToInputWeights;
- params.m_CellToForgetWeights = &cellToForgetWeights;
- params.m_CellToOutputWeights = &cellToOutputWeights;
-
- // Projection
- params.m_ProjectionWeights = &projectionWeights;
- params.m_ProjectionBias = &projectionBias;
-
- // Layer Norm
- params.m_InputLayerNormWeights = &inputLayerNormWeights;
- params.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
- params.m_CellLayerNormWeights = &cellLayerNormWeights;
- params.m_OutputLayerNormWeights = &outputLayerNormWeights;
-
- // Create network
- armnn::INetworkPtr network = armnn::INetwork::Create();
- const std::string layerName("qLstm");
-
- armnn::IConnectableLayer* const input = network->AddInputLayer(0);
- armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(1);
- armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(2);
-
- armnn::IConnectableLayer* const qLstmLayer = network->AddQLstmLayer(descriptor, params, layerName.c_str());
-
- armnn::IConnectableLayer* const outputStateOut = network->AddOutputLayer(0);
- armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(1);
- armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(2);
-
- // Input/Output tensor info
- armnn::TensorInfo inputInfo({numBatches , inputSize},
- armnn::DataType::QAsymmS8,
- inputScale,
- inputOffset);
-
- armnn::TensorInfo cellStateInfo({numBatches , numUnits},
- armnn::DataType::QSymmS16,
- cellStateScale,
- cellStateOffset);
-
- armnn::TensorInfo outputStateInfo({numBatches , outputSize},
- armnn::DataType::QAsymmS8,
- outputScale,
- outputOffset);
-
- // Connect input/output slots
- input->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(0));
- input->GetOutputSlot(0).SetTensorInfo(inputInfo);
-
- outputStateIn->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(1));
- outputStateIn->GetOutputSlot(0).SetTensorInfo(cellStateInfo);
-
- cellStateIn->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(2));
- cellStateIn->GetOutputSlot(0).SetTensorInfo(outputStateInfo);
-
- qLstmLayer->GetOutputSlot(0).Connect(outputStateOut->GetInputSlot(0));
- qLstmLayer->GetOutputSlot(0).SetTensorInfo(outputStateInfo);
-
- qLstmLayer->GetOutputSlot(1).Connect(cellStateOut->GetInputSlot(0));
- qLstmLayer->GetOutputSlot(1).SetTensorInfo(cellStateInfo);
-
- qLstmLayer->GetOutputSlot(2).Connect(outputLayer->GetInputSlot(0));
- qLstmLayer->GetOutputSlot(2).SetTensorInfo(outputStateInfo);
-
- armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
- BOOST_CHECK(deserializedNetwork);
-
- VerifyQLstmLayer checker(layerName,
- {inputInfo, cellStateInfo, outputStateInfo},
- {outputStateInfo, cellStateInfo, outputStateInfo},
- descriptor,
- params);
-
- deserializedNetwork->Accept(checker);
+ deserializedNetwork->ExecuteStrategy(verifier);
}
BOOST_AUTO_TEST_SUITE_END()