author    ruoyan01 <ruomei.yan@arm.com>    2019-02-28 15:09:07 +0000
committer ruoyan01 <ruomei.yan@arm.com>    2019-03-01 16:39:27 +0000
commit    8e7fa232b4e637cc02f2ca344b2113c63cdc7e5a (patch)
tree      3c200afe3c7cab37b553ba0461aed4410b7cfbb8 /src/armnnDeserializer/Deserializer.cpp
parent    dd2ba7ebf78a75aadd8ddd2ae1a4226ffc4ae4d9 (diff)
IVGCVSW-2681 Serialize/de-serialize the BatchNormalization layer
Change-Id: I418c4465366742262fb6e6c1eeba76c634beaeb5
Signed-off-by: ruoyan01 <ruomei.yan@arm.com>
Diffstat (limited to 'src/armnnDeserializer/Deserializer.cpp')
-rw-r--r-- src/armnnDeserializer/Deserializer.cpp | 40
1 file changed, 40 insertions(+), 0 deletions(-)
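
For context, the new deserialization path below can be exercised end to end by serializing a network that contains a BatchNormalization layer and reading it back. The following sketch is illustrative only and is not part of this commit; it assumes the armnn, armnnSerializer and armnnDeserializer public headers and APIs of this release (INetwork::Create, ISerializer::Serialize / SaveSerializedToStream, IDeserializer::CreateNetworkFromBinary), and the tensor shapes and parameter values are arbitrary.

// Round-trip sketch: build -> serialize -> deserialize (assumed public APIs).
#include <armnn/INetwork.hpp>
#include <armnnSerializer/ISerializer.hpp>
#include <armnnDeserializer/IDeserializer.hpp>

#include <cstdint>
#include <sstream>
#include <vector>

int main()
{
    using namespace armnn;

    // Build a small network: Input -> BatchNormalization -> Output.
    INetworkPtr network = INetwork::Create();

    TensorInfo inputInfo({1, 3, 4, 4}, DataType::Float32);
    TensorInfo paramInfo({3}, DataType::Float32); // one value per channel

    std::vector<float> meanData (3, 0.0f);
    std::vector<float> varData  (3, 1.0f);
    std::vector<float> betaData (3, 0.0f);
    std::vector<float> gammaData(3, 1.0f);

    BatchNormalizationDescriptor descriptor;
    descriptor.m_Eps        = 0.001f;
    descriptor.m_DataLayout = DataLayout::NCHW;

    IConnectableLayer* input     = network->AddInputLayer(0);
    IConnectableLayer* batchNorm = network->AddBatchNormalizationLayer(
        descriptor,
        ConstTensor(paramInfo, meanData),
        ConstTensor(paramInfo, varData),
        ConstTensor(paramInfo, betaData),
        ConstTensor(paramInfo, gammaData),
        "batchNorm");
    IConnectableLayer* output    = network->AddOutputLayer(0);

    input->GetOutputSlot(0).Connect(batchNorm->GetInputSlot(0));
    batchNorm->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    input->GetOutputSlot(0).SetTensorInfo(inputInfo);
    batchNorm->GetOutputSlot(0).SetTensorInfo(inputInfo);

    // Serialize to a FlatBuffers blob ...
    auto serializer = armnnSerializer::ISerializer::Create();
    serializer->Serialize(*network);
    std::stringstream stream;
    serializer->SaveSerializedToStream(stream);

    // ... and read it back, which exercises ParseBatchNormalization added here.
    std::string data = stream.str();
    std::vector<uint8_t> binary(data.begin(), data.end());
    auto deserializer = armnnDeserializer::IDeserializer::Create();
    INetworkPtr restored = deserializer->CreateNetworkFromBinary(binary);

    return restored ? 0 : 1;
}
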
diff --git a/src/armnnDeserializer/Deserializer.cpp b/src/armnnDeserializer/Deserializer.cpp
index 77bd7498ee..e8cda2e3d3 100644
--- a/src/armnnDeserializer/Deserializer.cpp
+++ b/src/armnnDeserializer/Deserializer.cpp
@@ -188,6 +188,7 @@ m_ParserFunctions(Layer_MAX+1, &Deserializer::ParseUnsupportedLayer)
m_ParserFunctions[Layer_ActivationLayer] = &Deserializer::ParseActivation;
m_ParserFunctions[Layer_AdditionLayer] = &Deserializer::ParseAdd;
m_ParserFunctions[Layer_BatchToSpaceNdLayer] = &Deserializer::ParseBatchToSpaceNd;
+ m_ParserFunctions[Layer_BatchNormalizationLayer] = &Deserializer::ParseBatchNormalization;
m_ParserFunctions[Layer_ConstantLayer] = &Deserializer::ParseConstant;
m_ParserFunctions[Layer_Convolution2dLayer] = &Deserializer::ParseConvolution2d;
m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &Deserializer::ParseDepthwiseConvolution2d;
@@ -220,6 +221,8 @@ Deserializer::LayerBaseRawPtr Deserializer::GetBaseLayer(const GraphPtr& graphPt
return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
case Layer::Layer_BatchToSpaceNdLayer:
return graphPtr->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->base();
+ case Layer::Layer_BatchNormalizationLayer:
+ return graphPtr->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer()->base();
case Layer::Layer_ConstantLayer:
return graphPtr->layers()->Get(layerIndex)->layer_as_ConstantLayer()->base();
case Layer::Layer_Convolution2dLayer:
@@ -848,6 +851,43 @@ void Deserializer::ParseBatchToSpaceNd(GraphPtr graph, unsigned int layerIndex)
RegisterOutputSlots(graph, layerIndex, layer);
}
+void Deserializer::ParseBatchNormalization(GraphPtr graph, unsigned int layerIndex)
+{
+ CHECK_LAYERS(graph, 0, layerIndex);
+
+ auto inputs = GetInputs(graph, layerIndex);
+ CHECK_VALID_SIZE(inputs.size(), 1);
+
+ auto outputs = GetOutputs(graph, layerIndex);
+ CHECK_VALID_SIZE(outputs.size(), 1);
+ auto outputInfo = ToTensorInfo(outputs[0]);
+
+ auto layerName = boost::str(boost::format("BatchNormalization:%1%") % layerIndex);
+
+ auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer();
+ auto serializerDescriptor = serializerLayer->descriptor();
+
+ armnn::BatchNormalizationDescriptor descriptor;
+ descriptor.m_Eps = serializerDescriptor->eps();
+ descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
+
+ armnn::ConstTensor mean = ToConstTensor(serializerLayer->mean());
+ armnn::ConstTensor variance = ToConstTensor(serializerLayer->variance());
+ armnn::ConstTensor beta = ToConstTensor(serializerLayer->beta());
+ armnn::ConstTensor gamma = ToConstTensor(serializerLayer->gamma());
+
+ IConnectableLayer* layer = m_Network->AddBatchNormalizationLayer(descriptor,
+ mean,
+ variance,
+ beta,
+ gamma,
+ layerName.c_str());
+ layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
+
+ RegisterInputSlots(graph, layerIndex, layer);
+ RegisterOutputSlots(graph, layerIndex, layer);
+}
+
void Deserializer::ParseConstant(GraphPtr graph, unsigned int layerIndex)
{
CHECK_LAYERS(graph, 0, layerIndex);