diff options
author | Éanna Ó Catháin <eanna.ocathain@arm.com> | 2019-02-25 16:26:29 +0000 |
---|---|---|
committer | Eanna O Cathain Arm <eanna.ocathain@arm.com> | 2019-02-25 17:28:08 +0000 |
commit | 633f859043522adb7629f3f7ea617cd65b3713f2 (patch) | |
tree | 39fdc2fdc5709076c1852f85ee8aa6702c83dae6 /src/armnnDeserializer/Deserializer.cpp | |
parent | 268509ac1750c6c8d7c6f0debd9dca7e42612341 (diff) | |
download | armnn-633f859043522adb7629f3f7ea617cd65b3713f2.tar.gz |
IVGCVSW-2751 When layers are Deserialized the layer name is lost
* Fixed defect
* Added unit tests to check names
Change-Id: I67211110b90aabff6e00bccdadd1145423b07523
Signed-off-by: Mike Kelly <mike.kelly@arm.com>
Signed-off-by: Éanna Ó Catháin <eanna.ocathain@arm.com>
Diffstat (limited to 'src/armnnDeserializer/Deserializer.cpp')
-rw-r--r-- | src/armnnDeserializer/Deserializer.cpp | 42 |
1 file changed, 21 insertions, 21 deletions
diff --git a/src/armnnDeserializer/Deserializer.cpp b/src/armnnDeserializer/Deserializer.cpp index 08fe7a5e90..09c0502886 100644 --- a/src/armnnDeserializer/Deserializer.cpp +++ b/src/armnnDeserializer/Deserializer.cpp @@ -235,6 +235,13 @@ Deserializer::LayerBaseRawPtr Deserializer::GetBaseLayer(const GraphPtr& graphPt } } +std::string Deserializer::GetLayerName(const GraphPtr& graph, unsigned int index) +{ + auto layer = GetBaseLayer(graph, index); + assert(layer); + return layer->layerName()->str(); +} + int32_t Deserializer::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex) { auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type(); @@ -727,9 +734,8 @@ void Deserializer::ParseActivation(GraphPtr graph, unsigned int layerIndex) auto outputs = GetOutputs(graph, layerIndex); CHECK_VALID_SIZE(outputs.size(), 1); - auto layerName = boost::str(boost::format("Activation:%1%") % layerIndex); - auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer(); + auto layerName = GetLayerName(graph, layerIndex); auto serializerDescriptor = serializerLayer->descriptor(); armnn::ActivationDescriptor descriptor; @@ -756,8 +762,8 @@ void Deserializer::ParseAdd(GraphPtr graph, unsigned int layerIndex) auto outputs = GetOutputs(graph, layerIndex); CHECK_VALID_SIZE(outputs.size(), 1); - m_layerName = boost::str(boost::format("Addition:%1%") % layerIndex); - IConnectableLayer* layer = m_Network->AddAdditionLayer(m_layerName.c_str()); + auto layerName = GetLayerName(graph, layerIndex); + IConnectableLayer* layer = m_Network->AddAdditionLayer(layerName.c_str()); armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]); layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo); @@ -776,9 +782,8 @@ void Deserializer::ParseConvolution2d(GraphPtr graph, unsigned int layerIndex) auto outputs = GetOutputs(graph, layerIndex); CHECK_VALID_SIZE(outputs.size(), 1); - auto layerName = boost::str(boost::format("Convolution2d:%1%") % 
layerIndex); - auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer(); + auto layerName = GetLayerName(graph, layerIndex); auto serializerDescriptor = serializerLayer->descriptor(); armnn::Convolution2dDescriptor descriptor; @@ -819,9 +824,8 @@ void Deserializer::ParseDepthwiseConvolution2d(GraphPtr graph, unsigned int laye auto outputs = GetOutputs(graph, layerIndex); CHECK_VALID_SIZE(outputs.size(), 1); - auto layerName = boost::str(boost::format("DepthwiseConvolution2d:%1%") % layerIndex); - auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer(); + auto layerName = GetLayerName(graph, layerIndex); auto serializerDescriptor = serializerLayer->descriptor(); armnn::DepthwiseConvolution2dDescriptor descriptor; @@ -863,8 +867,8 @@ void Deserializer::ParseMultiplication(GraphPtr graph, unsigned int layerIndex) auto outputs = GetOutputs(graph, layerIndex); CHECK_VALID_SIZE(outputs.size(), 1); - m_layerName = boost::str(boost::format("Multiplication:%1%") % layerIndex); - IConnectableLayer* layer = m_Network->AddMultiplicationLayer(m_layerName.c_str()); + auto layerName = GetLayerName(graph, layerIndex); + IConnectableLayer* layer = m_Network->AddMultiplicationLayer(layerName.c_str()); armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]); layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo); @@ -883,9 +887,8 @@ void Deserializer::ParseFullyConnected(GraphPtr graph, unsigned int layerIndex) auto outputs = GetOutputs(graph, layerIndex); CHECK_VALID_SIZE(outputs.size(), 1); - auto layerName = boost::str(boost::format("FullyConnected:%1%") % layerIndex); - auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer(); + auto layerName = GetLayerName(graph, layerIndex); auto flatBufferDescriptor = flatBufferLayer->descriptor(); armnn::FullyConnectedDescriptor fullyConnectedDescriptor; @@ -931,10 +934,10 @@ void Deserializer::ParsePermute(GraphPtr graph, unsigned int 
layerIndex) CHECK_VALID_SIZE(outputs.size(), 1); auto outputInfo = ToTensorInfo(outputs[0]); - m_layerName = boost::str(boost::format("Permute:%1%") % layerIndex); + auto layerName = GetLayerName(graph, layerIndex); const armnn::PermuteDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length())); - IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, m_layerName.c_str()); + IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str()); layer->GetOutputSlot(0).SetTensorInfo(outputInfo); RegisterInputSlots(graph, layerIndex, layer); @@ -951,13 +954,11 @@ armnn::Pooling2dDescriptor Deserializer::GetPoolingDescriptor(Deserializer::Pool case PoolingAlgorithm_Average: { desc.m_PoolType = armnn::PoolingAlgorithm::Average; - m_layerName = boost::str(boost::format("AveragePool2D:%1%") % layerIndex); break; } case PoolingAlgorithm_Max: { desc.m_PoolType = armnn::PoolingAlgorithm::Max; - m_layerName = boost::str(boost::format("MaxPool2D:%1%") % layerIndex); break; } default: @@ -1037,7 +1038,6 @@ void Deserializer::ParsePooling2d(GraphPtr graph, unsigned int layerIndex) CHECK_LAYERS(graph, 0, layerIndex); auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor(); - auto inputs = GetInputs(graph, layerIndex); CHECK_VALID_SIZE(inputs.size(), 1); @@ -1046,8 +1046,8 @@ void Deserializer::ParsePooling2d(GraphPtr graph, unsigned int layerIndex) auto outputInfo = ToTensorInfo(outputs[0]); auto pooling2dDescriptor = GetPoolingDescriptor(pooling2dDes, layerIndex); - - IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, m_layerName.c_str()); + auto layerName = GetLayerName(graph, layerIndex); + IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str()); layer->GetOutputSlot(0).SetTensorInfo(outputInfo); RegisterInputSlots(graph, layerIndex, layer); @@ -1119,7 +1119,7 @@ void Deserializer::ParseReshape(GraphPtr graph, 
unsigned int layerIndex) armnn::ReshapeDescriptor reshapeDesc; reshapeDesc.m_TargetShape = reshapeOutputTensorShape; - auto layerName = boost::str(boost::format("Reshape:%1%") % layerIndex); + auto layerName = GetLayerName(graph, layerIndex); IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str()); layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo); @@ -1139,8 +1139,8 @@ void Deserializer::ParseSoftmax(GraphPtr graph, unsigned int layerIndex) armnn::SoftmaxDescriptor descriptor; descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta(); + auto layerName = GetLayerName(graph, layerIndex); - const std::string layerName = boost::str(boost::format("Softmax:%1%") % layerIndex); IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str()); armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]); |