diff options
author | Nattapat Chaimanowong <nattapat.chaimanowong@arm.com> | 2019-03-01 16:14:13 +0000 |
---|---|---|
committer | Nattapat Chaimanowong <nattapat.chaimanowong@arm.com> | 2019-03-04 10:14:57 +0000 |
commit | 6522cdcd8b07aa8f423f832305eed57d79891e92 (patch) | |
tree | c6f757413eb0a904c6bb6a5c1080d927acaeb84d /src/armnnDeserializer/Deserializer.cpp | |
parent | 79ffdf57b12da6ed3fe70e62311af6c4661a9bd3 (diff) | |
download | armnn-6522cdcd8b07aa8f423f832305eed57d79891e92.tar.gz |
IVGCVSW-2705 Add Serializer and Deserializer for ResizeBilinear
Change-Id: Ibc5689a2e00d38dc98ef39e50ed5dc3b91791e16
Signed-off-by: Nattapat Chaimanowong <nattapat.chaimanowong@arm.com>
Diffstat (limited to 'src/armnnDeserializer/Deserializer.cpp')
-rw-r--r-- | src/armnnDeserializer/Deserializer.cpp | 30 |
1 file changed, 30 insertions, 0 deletions
diff --git a/src/armnnDeserializer/Deserializer.cpp b/src/armnnDeserializer/Deserializer.cpp
index aebdf0e52c..aa7454339e 100644
--- a/src/armnnDeserializer/Deserializer.cpp
+++ b/src/armnnDeserializer/Deserializer.cpp
@@ -205,6 +205,7 @@ m_ParserFunctions(Layer_MAX+1, &Deserializer::ParseUnsupportedLayer)
     m_ParserFunctions[Layer_PermuteLayer]        = &Deserializer::ParsePermute;
     m_ParserFunctions[Layer_Pooling2dLayer]      = &Deserializer::ParsePooling2d;
     m_ParserFunctions[Layer_ReshapeLayer]        = &Deserializer::ParseReshape;
+    m_ParserFunctions[Layer_ResizeBilinearLayer] = &Deserializer::ParseResizeBilinear;
     m_ParserFunctions[Layer_RsqrtLayer]          = &Deserializer::ParseRsqrt;
     m_ParserFunctions[Layer_SoftmaxLayer]        = &Deserializer::ParseSoftmax;
     m_ParserFunctions[Layer_SpaceToBatchNdLayer] = &Deserializer::ParseSpaceToBatchNd;
@@ -260,6 +261,8 @@ Deserializer::LayerBaseRawPtr Deserializer::GetBaseLayer(const GraphPtr& graphPtr
             return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
         case Layer::Layer_ReshapeLayer:
             return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
+        case Layer::Layer_ResizeBilinearLayer:
+            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->base();
         case Layer::Layer_RsqrtLayer:
             return graphPtr->layers()->Get(layerIndex)->layer_as_RsqrtLayer()->base();
         case Layer::Layer_SoftmaxLayer:
@@ -1431,6 +1434,33 @@ void Deserializer::ParseReshape(GraphPtr graph, unsigned int layerIndex)
     RegisterOutputSlots(graph, layerIndex, layer);
 }
 
+void Deserializer::ParseResizeBilinear(GraphPtr graph, unsigned int layerIndex)
+{
+    CHECK_LAYERS(graph, 0, layerIndex);
+
+    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
+    CHECK_VALID_SIZE(inputs.size(), 1);
+
+    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
+    CHECK_VALID_SIZE(outputs.size(), 1);
+
+    auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->descriptor();
+
+    armnn::ResizeBilinearDescriptor descriptor;
+    descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
+    descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
+    descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
+
+    auto layerName = GetLayerName(graph, layerIndex);
+    IConnectableLayer* layer = m_Network->AddResizeBilinearLayer(descriptor, layerName.c_str());
+
+    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
+    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
+
+    RegisterInputSlots(graph, layerIndex, layer);
+    RegisterOutputSlots(graph, layerIndex, layer);
+}
+
 void Deserializer::ParseSoftmax(GraphPtr graph, unsigned int layerIndex)
 {
     CHECK_LAYERS(graph, 0, layerIndex);