diff options
Diffstat (limited to 'src/armnnSerializer')
-rw-r--r-- | src/armnnSerializer/ArmnnSchema.fbs | 19
-rw-r--r-- | src/armnnSerializer/Serializer.cpp | 29
-rw-r--r-- | src/armnnSerializer/Serializer.hpp | 8
-rw-r--r-- | src/armnnSerializer/SerializerSupport.md | 1
-rw-r--r-- | src/armnnSerializer/test/SerializerTests.cpp | 68
5 files changed, 124 insertions(+), 1 deletion(-)
diff --git a/src/armnnSerializer/ArmnnSchema.fbs b/src/armnnSerializer/ArmnnSchema.fbs index 4e3180f0ff..f416912618 100644 --- a/src/armnnSerializer/ArmnnSchema.fbs +++ b/src/armnnSerializer/ArmnnSchema.fbs @@ -103,7 +103,8 @@ enum LayerType : uint { Normalization = 19, Pad = 20, Rsqrt = 21, - Floor = 22 + Floor = 22, + BatchNormalization = 23 } // Base layer table to be used as part of other layers @@ -340,14 +341,30 @@ table PadDescriptor { padList:[uint]; } + table RsqrtLayer { base:LayerBase; } +table BatchNormalizationLayer { + base:LayerBase; + descriptor:BatchNormalizationDescriptor; + mean:ConstTensor; + variance:ConstTensor; + beta:ConstTensor; + gamma:ConstTensor; +} + +table BatchNormalizationDescriptor { + eps:float; + dataLayout:DataLayout; +} + union Layer { ActivationLayer, AdditionLayer, BatchToSpaceNdLayer, + BatchNormalizationLayer, ConstantLayer, Convolution2dLayer, DepthwiseConvolution2dLayer, diff --git a/src/armnnSerializer/Serializer.cpp b/src/armnnSerializer/Serializer.cpp index d40cdfa591..423706ceb3 100644 --- a/src/armnnSerializer/Serializer.cpp +++ b/src/armnnSerializer/Serializer.cpp @@ -170,6 +170,35 @@ void SerializerVisitor::VisitBatchToSpaceNdLayer(const armnn::IConnectableLayer* CreateAnyLayer(flatBufferLayer.o, serializer::Layer::Layer_BatchToSpaceNdLayer); } +void SerializerVisitor::VisitBatchNormalizationLayer(const armnn::IConnectableLayer* layer, + const armnn::BatchNormalizationDescriptor& batchNormDescriptor, + const armnn::ConstTensor& mean, + const armnn::ConstTensor& variance, + const armnn::ConstTensor& beta, + const armnn::ConstTensor& gamma, + const char* name) +{ + auto fbBatchNormalizationBaseLayer = CreateLayerBase(layer, serializer::LayerType::LayerType_BatchNormalization); + auto fbBatchNormalizationDescriptor = serializer::CreateBatchNormalizationDescriptor( + m_flatBufferBuilder, + batchNormDescriptor.m_Eps, + GetFlatBufferDataLayout(batchNormDescriptor.m_DataLayout)); + + auto fbMeanConstTensorInfo = 
CreateConstTensorInfo(mean); + auto fbVarianceConstTensorInfo = CreateConstTensorInfo(variance); + auto fbBetaConstTensorInfo = CreateConstTensorInfo(beta); + auto fbGammaConstTensorInfo = CreateConstTensorInfo(gamma); + auto fbBatchNormalizationLayer = serializer::CreateBatchNormalizationLayer(m_flatBufferBuilder, + fbBatchNormalizationBaseLayer, + fbBatchNormalizationDescriptor, + fbMeanConstTensorInfo, + fbVarianceConstTensorInfo, + fbBetaConstTensorInfo, + fbGammaConstTensorInfo); + + CreateAnyLayer(fbBatchNormalizationLayer.o, serializer::Layer::Layer_BatchNormalizationLayer); +} + // Build FlatBuffer for Constant Layer void SerializerVisitor::VisitConstantLayer(const armnn::IConnectableLayer* layer, const armnn::ConstTensor& input, diff --git a/src/armnnSerializer/Serializer.hpp b/src/armnnSerializer/Serializer.hpp index bf599b1a1f..a60d19b860 100644 --- a/src/armnnSerializer/Serializer.hpp +++ b/src/armnnSerializer/Serializer.hpp @@ -53,6 +53,14 @@ public: const armnn::BatchToSpaceNdDescriptor& descriptor, const char* name = nullptr) override; + void VisitBatchNormalizationLayer(const armnn::IConnectableLayer* layer, + const armnn::BatchNormalizationDescriptor& BatchNormalizationDescriptor, + const armnn::ConstTensor& mean, + const armnn::ConstTensor& variance, + const armnn::ConstTensor& beta, + const armnn::ConstTensor& gamma, + const char* name = nullptr) override; + void VisitConstantLayer(const armnn::IConnectableLayer* layer, const armnn::ConstTensor& input, const char* = nullptr) override; diff --git a/src/armnnSerializer/SerializerSupport.md b/src/armnnSerializer/SerializerSupport.md index b5a322f525..98023a6771 100644 --- a/src/armnnSerializer/SerializerSupport.md +++ b/src/armnnSerializer/SerializerSupport.md @@ -9,6 +9,7 @@ The Arm NN SDK Serializer currently supports the following layers: * Activation * Addition * BatchToSpaceNd +* BatchNormalization * Constant * Convolution2d * DepthwiseConvolution2d diff --git 
a/src/armnnSerializer/test/SerializerTests.cpp b/src/armnnSerializer/test/SerializerTests.cpp index 5f1745bda5..3ef15831b1 100644 --- a/src/armnnSerializer/test/SerializerTests.cpp +++ b/src/armnnSerializer/test/SerializerTests.cpp @@ -888,6 +888,74 @@ BOOST_AUTO_TEST_CASE(SerializeDeserializeBatchToSpaceNd) {outputTensorInfo.GetShape()}); } +BOOST_AUTO_TEST_CASE(SerializeDeserializeBatchNormalization) +{ + class VerifyBatchNormalizationName : public armnn::LayerVisitorBase<armnn::VisitorNoThrowPolicy> + { + public: + void VisitBatchNormalizationLayer(const armnn::IConnectableLayer*, + const armnn::BatchNormalizationDescriptor&, + const armnn::ConstTensor&, + const armnn::ConstTensor&, + const armnn::ConstTensor&, + const armnn::ConstTensor&, + const char* name) override + { + BOOST_TEST(name == "BatchNormalization:1"); + } + }; + + armnn::TensorInfo inputInfo ({ 1, 3, 3, 1 }, armnn::DataType::Float32); + armnn::TensorInfo outputInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32); + + armnn::TensorInfo meanInfo({1}, armnn::DataType::Float32); + armnn::TensorInfo varianceInfo({1}, armnn::DataType::Float32); + armnn::TensorInfo scaleInfo({1}, armnn::DataType::Float32); + armnn::TensorInfo offsetInfo({1}, armnn::DataType::Float32); + + armnn::BatchNormalizationDescriptor descriptor; + descriptor.m_Eps = 0.0010000000475f; + descriptor.m_DataLayout = armnn::DataLayout::NHWC; + + std::vector<float> meanData({5.0}); + std::vector<float> varianceData({2.0}); + std::vector<float> scaleData({1.0}); + std::vector<float> offsetData({0.0}); + + armnn::ConstTensor mean(meanInfo, meanData); + armnn::ConstTensor variance(varianceInfo, varianceData); + armnn::ConstTensor scale(scaleInfo, scaleData); + armnn::ConstTensor offset(offsetInfo, offsetData); + + armnn::INetworkPtr network = armnn::INetwork::Create(); + armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0); + armnn::IConnectableLayer* const batchNormalizationLayer = network->AddBatchNormalizationLayer( + 
descriptor, + mean, + variance, + scale, + offset, + "BatchNormalizationLayer"); + armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0); + + inputLayer->GetOutputSlot(0).Connect(batchNormalizationLayer->GetInputSlot(0)); + inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo); + + batchNormalizationLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0)); + batchNormalizationLayer->GetOutputSlot(0).SetTensorInfo(outputInfo); + + armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); + BOOST_CHECK(deserializedNetwork); + + VerifyBatchNormalizationName nameChecker; + deserializedNetwork->Accept(nameChecker); + + CheckDeserializedNetworkAgainstOriginal(*network, + *deserializedNetwork, + {inputInfo.GetShape()}, + {outputInfo.GetShape()}); +} + BOOST_AUTO_TEST_CASE(SerializeDivision) { class VerifyDivisionName : public armnn::LayerVisitorBase<armnn::VisitorNoThrowPolicy> |