From 2605b236d103e1ba27069e0d668599042a4761af Mon Sep 17 00:00:00 2001
From: Finn Williams
Date: Wed, 10 Jun 2020 15:53:46 +0100
Subject: IVGCVSW-4624 Add a RANK Reference Implementation

 * Add Rank front end
 * Add Rank reference implementation
 * Add Rank serialization support
 * Add Scalar serialization support

Signed-off-by: Finn Williams
Change-Id: I06e4a468c2a84e79bae2e6c5348596bbbf853b4b
---
 src/armnnDeserializer/Deserializer.cpp | 35 +++++++++++++++++++++++++++++++++-
 1 file changed, 34 insertions(+), 1 deletion(-)

(limited to 'src/armnnDeserializer/Deserializer.cpp')

diff --git a/src/armnnDeserializer/Deserializer.cpp b/src/armnnDeserializer/Deserializer.cpp
index 31fae2af86..7143cdbdcc 100644
--- a/src/armnnDeserializer/Deserializer.cpp
+++ b/src/armnnDeserializer/Deserializer.cpp
@@ -1,5 +1,5 @@
 //
-// Copyright © 2017 Arm Ltd. All rights reserved.
+// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
 // SPDX-License-Identifier: MIT
 //
@@ -225,6 +225,7 @@ m_ParserFunctions(Layer_MAX+1, &Deserializer::ParseUnsupportedLayer)
     m_ParserFunctions[Layer_QLstmLayer] = &Deserializer::ParseQLstm;
     m_ParserFunctions[Layer_QuantizeLayer] = &Deserializer::ParseQuantize;
     m_ParserFunctions[Layer_QuantizedLstmLayer] = &Deserializer::ParseQuantizedLstm;
+    m_ParserFunctions[Layer_RankLayer] = &Deserializer::ParseRank;
     m_ParserFunctions[Layer_ReshapeLayer] = &Deserializer::ParseReshape;
     m_ParserFunctions[Layer_ResizeBilinearLayer] = &Deserializer::ParseResizeBilinear;
     m_ParserFunctions[Layer_ResizeLayer] = &Deserializer::ParseResize;
@@ -331,6 +332,8 @@ Deserializer::LayerBaseRawPtr Deserializer::GetBaseLayer(const GraphPtr& graphPt
             return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizeLayer()->base();
         case Layer::Layer_QuantizedLstmLayer:
             return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer()->base();
+        case Layer::Layer_RankLayer:
+            return graphPtr->layers()->Get(layerIndex)->layer_as_RankLayer()->base();
         case Layer::Layer_ReshapeLayer:
             return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
         case Layer::Layer_ResizeBilinearLayer:
@@ -545,6 +548,16 @@ armnn::TensorInfo ToTensorInfo(Deserializer::TensorRawPtr tensorPtr)
         }
     }
 
+    if (tensorPtr->dimensionality() == static_cast<unsigned int>(Dimensionality::Scalar))
+    {
+        float quantizationScale = tensorPtr->quantizationScale();
+        int32_t quantizationOffset = tensorPtr->quantizationOffset();
+
+        return armnn::TensorInfo(armnn::TensorShape{armnn::Dimensionality::Scalar},
+                                 type,
+                                 quantizationScale,
+                                 quantizationOffset);
+    }
     auto dimensions = tensorPtr->dimensions();
     unsigned int size = dimensions->size();
@@ -2008,6 +2021,26 @@ armnn::TensorInfo Deserializer::OutputShapeOfReshape(const armnn::TensorInfo& in
     return reshapeInfo;
 }
 
+void Deserializer::ParseRank(GraphPtr graph, unsigned int layerIndex)
+{
+    CHECK_LAYERS(graph, 0, layerIndex);
+
+    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
+    CHECK_VALID_SIZE(inputs.size(), 1);
+
+    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
+    CHECK_VALID_SIZE(outputs.size(), 1);
+
+    auto layerName = GetLayerName(graph, layerIndex);
+    IConnectableLayer* layer = m_Network->AddRankLayer( layerName.c_str());
+
+    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
+    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
+
+    RegisterInputSlots(graph, layerIndex, layer);
+    RegisterOutputSlots(graph, layerIndex, layer);
+}
+
 void Deserializer::ParseReshape(GraphPtr graph, unsigned int layerIndex)
 {
     CHECK_LAYERS(graph, 0, layerIndex);
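
For reference, below is a minimal sketch (not part of the commit above) of how a graph containing the new Rank layer can be built through ArmNN's public INetwork API, producing the scalar output shape that the Dimensionality::Scalar handling added to ToTensorInfo() reconstructs on deserialization. The layer names, the 4D input shape, and the Signed32 output type are illustrative assumptions, not taken from this patch:

// Illustrative sketch only; names and shapes are example assumptions.
#include <armnn/INetwork.hpp>
#include <armnn/Tensor.hpp>
#include <armnn/Types.hpp>

int main()
{
    using namespace armnn;

    INetworkPtr network = INetwork::Create();

    // Rank reports the number of dimensions of its input, independent of
    // the element values: for a 4D input it produces the scalar value 4.
    IConnectableLayer* input  = network->AddInputLayer(0, "input");
    IConnectableLayer* rank   = network->AddRankLayer("rank");
    IConnectableLayer* output = network->AddOutputLayer(0, "output");

    input->GetOutputSlot(0).Connect(rank->GetInputSlot(0));
    rank->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    // Example 4D input tensor.
    TensorInfo inputInfo(TensorShape({1, 3, 224, 224}), DataType::Float32);

    // The Rank output is a scalar, expressed with the same
    // TensorShape{Dimensionality::Scalar} form used in the patch above.
    TensorInfo rankInfo(TensorShape{Dimensionality::Scalar}, DataType::Signed32);

    input->GetOutputSlot(0).SetTensorInfo(inputInfo);
    rank->GetOutputSlot(0).SetTensorInfo(rankInfo);

    return 0;
}

Serializing a network built this way and loading it back exercises the ParseRank and scalar ToTensorInfo paths added in this change.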