author     Éanna Ó Catháin <eanna.ocathain@arm.com>      2019-02-25 16:26:29 +0000
committer  Eanna O Cathain Arm <eanna.ocathain@arm.com>  2019-02-25 17:28:08 +0000
commit     633f859043522adb7629f3f7ea617cd65b3713f2 (patch)
tree       39fdc2fdc5709076c1852f85ee8aa6702c83dae6
parent     268509ac1750c6c8d7c6f0debd9dca7e42612341 (diff)
download   armnn-633f859043522adb7629f3f7ea617cd65b3713f2.tar.gz
IVGCVSW-2751 When layers are Deserialized the layer name is lost
* Fixed defect
* Added unit tests to check names

Change-Id: I67211110b90aabff6e00bccdadd1145423b07523
Signed-off-by: Mike Kelly <mike.kelly@arm.com>
Signed-off-by: Éanna Ó Catháin <eanna.ocathain@arm.com>
-rw-r--r--  src/armnnDeserializer/Deserializer.cpp                      42
-rw-r--r--  src/armnnDeserializer/Deserializer.hpp                       2
-rw-r--r--  src/armnnSerializer/test/ActivationSerializationTests.cpp   14
-rw-r--r--  src/armnnSerializer/test/SerializerTests.cpp                172
4 files changed, 185 insertions, 45 deletions
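
The change follows one pattern throughout the deserializer: instead of rebuilding a synthetic name such as "Addition:<index>", each ParseXxx function now reads the name that was serialized with the layer (via the new GetLayerName helper) and passes it to the corresponding AddXxxLayer call, and the unit tests verify the round trip with layer visitors. The sketch below is illustrative only and is not part of the patch; it shows the round-trip check for a single named addition layer and assumes the Arm NN public API of this release (ISerializer, IDeserializer, LayerVisitorBase), so exact headers and signatures may differ.

// Illustrative sketch (not part of this patch): serialize a network containing a
// named layer, deserialize it again, and confirm the name survives by visiting
// the restored graph. Assumes the Arm NN public API of this era.
#include <armnn/INetwork.hpp>
#include <armnn/LayerVisitorBase.hpp>
#include <armnn/Tensor.hpp>
#include <armnnSerializer/ISerializer.hpp>
#include <armnnDeserializer/IDeserializer.hpp>

#include <cassert>
#include <cstdint>
#include <sstream>
#include <string>
#include <vector>

// Records the name of every addition layer it visits; mirrors the
// VerifyAdditionName visitor added in SerializerTests.cpp.
class AdditionNameChecker : public armnn::LayerVisitorBase<armnn::VisitorNoThrowPolicy>
{
public:
    void VisitAdditionLayer(const armnn::IConnectableLayer*, const char* name) override
    {
        m_Name = name ? name : "";
    }
    std::string m_Name;
};

int main()
{
    // Build a trivial network with a named addition layer.
    armnn::INetworkPtr network = armnn::INetwork::Create();
    armnn::IConnectableLayer* const input0 = network->AddInputLayer(0);
    armnn::IConnectableLayer* const input1 = network->AddInputLayer(1);
    armnn::IConnectableLayer* const add    = network->AddAdditionLayer("addition");
    armnn::IConnectableLayer* const output = network->AddOutputLayer(0);

    input0->GetOutputSlot(0).Connect(add->GetInputSlot(0));
    input1->GetOutputSlot(0).Connect(add->GetInputSlot(1));
    add->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    armnn::TensorShape shape{1U};
    armnn::TensorInfo info(shape, armnn::DataType::Float32);
    input0->GetOutputSlot(0).SetTensorInfo(info);
    input1->GetOutputSlot(0).SetTensorInfo(info);
    add->GetOutputSlot(0).SetTensorInfo(info);

    // Round-trip through the serializer and the deserializer.
    armnnSerializer::ISerializerPtr serializer = armnnSerializer::ISerializer::Create();
    serializer->Serialize(*network);
    std::stringstream stream;
    serializer->SaveSerializedToStream(stream);

    const std::string data = stream.str();
    std::vector<std::uint8_t> binary(data.begin(), data.end());

    armnnDeserializer::IDeserializerPtr parser = armnnDeserializer::IDeserializer::Create();
    armnn::INetworkPtr restored = parser->CreateNetworkFromBinary(binary);

    // Before this fix the deserializer substituted generated names such as
    // "Addition:0"; with the fix the original layer name is preserved.
    AdditionNameChecker checker;
    restored->Accept(checker);
    assert(checker.m_Name == "addition");
    return 0;
}
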
diff --git a/src/armnnDeserializer/Deserializer.cpp b/src/armnnDeserializer/Deserializer.cpp
index 08fe7a5e90..09c0502886 100644
--- a/src/armnnDeserializer/Deserializer.cpp
+++ b/src/armnnDeserializer/Deserializer.cpp
@@ -235,6 +235,13 @@ Deserializer::LayerBaseRawPtr Deserializer::GetBaseLayer(const GraphPtr& graphPt
}
}
+std::string Deserializer::GetLayerName(const GraphPtr& graph, unsigned int index)
+{
+ auto layer = GetBaseLayer(graph, index);
+ assert(layer);
+ return layer->layerName()->str();
+}
+
int32_t Deserializer::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
{
auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();
@@ -727,9 +734,8 @@ void Deserializer::ParseActivation(GraphPtr graph, unsigned int layerIndex)
auto outputs = GetOutputs(graph, layerIndex);
CHECK_VALID_SIZE(outputs.size(), 1);
- auto layerName = boost::str(boost::format("Activation:%1%") % layerIndex);
-
auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
+ auto layerName = GetLayerName(graph, layerIndex);
auto serializerDescriptor = serializerLayer->descriptor();
armnn::ActivationDescriptor descriptor;
@@ -756,8 +762,8 @@ void Deserializer::ParseAdd(GraphPtr graph, unsigned int layerIndex)
auto outputs = GetOutputs(graph, layerIndex);
CHECK_VALID_SIZE(outputs.size(), 1);
- m_layerName = boost::str(boost::format("Addition:%1%") % layerIndex);
- IConnectableLayer* layer = m_Network->AddAdditionLayer(m_layerName.c_str());
+ auto layerName = GetLayerName(graph, layerIndex);
+ IConnectableLayer* layer = m_Network->AddAdditionLayer(layerName.c_str());
armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
@@ -776,9 +782,8 @@ void Deserializer::ParseConvolution2d(GraphPtr graph, unsigned int layerIndex)
auto outputs = GetOutputs(graph, layerIndex);
CHECK_VALID_SIZE(outputs.size(), 1);
- auto layerName = boost::str(boost::format("Convolution2d:%1%") % layerIndex);
-
auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
+ auto layerName = GetLayerName(graph, layerIndex);
auto serializerDescriptor = serializerLayer->descriptor();
armnn::Convolution2dDescriptor descriptor;
@@ -819,9 +824,8 @@ void Deserializer::ParseDepthwiseConvolution2d(GraphPtr graph, unsigned int laye
auto outputs = GetOutputs(graph, layerIndex);
CHECK_VALID_SIZE(outputs.size(), 1);
- auto layerName = boost::str(boost::format("DepthwiseConvolution2d:%1%") % layerIndex);
-
auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
+ auto layerName = GetLayerName(graph, layerIndex);
auto serializerDescriptor = serializerLayer->descriptor();
armnn::DepthwiseConvolution2dDescriptor descriptor;
@@ -863,8 +867,8 @@ void Deserializer::ParseMultiplication(GraphPtr graph, unsigned int layerIndex)
auto outputs = GetOutputs(graph, layerIndex);
CHECK_VALID_SIZE(outputs.size(), 1);
- m_layerName = boost::str(boost::format("Multiplication:%1%") % layerIndex);
- IConnectableLayer* layer = m_Network->AddMultiplicationLayer(m_layerName.c_str());
+ auto layerName = GetLayerName(graph, layerIndex);
+ IConnectableLayer* layer = m_Network->AddMultiplicationLayer(layerName.c_str());
armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
@@ -883,9 +887,8 @@ void Deserializer::ParseFullyConnected(GraphPtr graph, unsigned int layerIndex)
auto outputs = GetOutputs(graph, layerIndex);
CHECK_VALID_SIZE(outputs.size(), 1);
- auto layerName = boost::str(boost::format("FullyConnected:%1%") % layerIndex);
-
auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
+ auto layerName = GetLayerName(graph, layerIndex);
auto flatBufferDescriptor = flatBufferLayer->descriptor();
armnn::FullyConnectedDescriptor fullyConnectedDescriptor;
@@ -931,10 +934,10 @@ void Deserializer::ParsePermute(GraphPtr graph, unsigned int layerIndex)
CHECK_VALID_SIZE(outputs.size(), 1);
auto outputInfo = ToTensorInfo(outputs[0]);
- m_layerName = boost::str(boost::format("Permute:%1%") % layerIndex);
+ auto layerName = GetLayerName(graph, layerIndex);
const armnn::PermuteDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));
- IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, m_layerName.c_str());
+ IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str());
layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
RegisterInputSlots(graph, layerIndex, layer);
@@ -951,13 +954,11 @@ armnn::Pooling2dDescriptor Deserializer::GetPoolingDescriptor(Deserializer::Pool
case PoolingAlgorithm_Average:
{
desc.m_PoolType = armnn::PoolingAlgorithm::Average;
- m_layerName = boost::str(boost::format("AveragePool2D:%1%") % layerIndex);
break;
}
case PoolingAlgorithm_Max:
{
desc.m_PoolType = armnn::PoolingAlgorithm::Max;
- m_layerName = boost::str(boost::format("MaxPool2D:%1%") % layerIndex);
break;
}
default:
@@ -1037,7 +1038,6 @@ void Deserializer::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
CHECK_LAYERS(graph, 0, layerIndex);
auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
-
auto inputs = GetInputs(graph, layerIndex);
CHECK_VALID_SIZE(inputs.size(), 1);
@@ -1046,8 +1046,8 @@ void Deserializer::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
auto outputInfo = ToTensorInfo(outputs[0]);
auto pooling2dDescriptor = GetPoolingDescriptor(pooling2dDes, layerIndex);
-
- IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, m_layerName.c_str());
+ auto layerName = GetLayerName(graph, layerIndex);
+ IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str());
layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
RegisterInputSlots(graph, layerIndex, layer);
@@ -1119,7 +1119,7 @@ void Deserializer::ParseReshape(GraphPtr graph, unsigned int layerIndex)
armnn::ReshapeDescriptor reshapeDesc;
reshapeDesc.m_TargetShape = reshapeOutputTensorShape;
- auto layerName = boost::str(boost::format("Reshape:%1%") % layerIndex);
+ auto layerName = GetLayerName(graph, layerIndex);
IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);
@@ -1139,8 +1139,8 @@ void Deserializer::ParseSoftmax(GraphPtr graph, unsigned int layerIndex)
armnn::SoftmaxDescriptor descriptor;
descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
+ auto layerName = GetLayerName(graph, layerIndex);
- const std::string layerName = boost::str(boost::format("Softmax:%1%") % layerIndex);
IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());
armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
diff --git a/src/armnnDeserializer/Deserializer.hpp b/src/armnnDeserializer/Deserializer.hpp
index 1e775d0c4a..94318e4062 100644
--- a/src/armnnDeserializer/Deserializer.hpp
+++ b/src/armnnDeserializer/Deserializer.hpp
@@ -50,6 +50,7 @@ public:
static LayerBaseRawPtrVector GetGraphOutputs(const GraphPtr& graphPtr);
static LayerBaseRawPtr GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex);
static int32_t GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex);
+ static std::string GetLayerName(const GraphPtr& graph, unsigned int index);
armnn::Pooling2dDescriptor GetPoolingDescriptor(PoolingDescriptor pooling2dDescriptor,
unsigned int layerIndex);
static armnn::TensorInfo OutputShapeOfReshape(const armnn::TensorInfo & inputTensorInfo,
@@ -92,7 +93,6 @@ private:
/// The network we're building. Gets cleared after it is passed to the user
armnn::INetworkPtr m_Network;
std::vector<LayerParsingFunction> m_ParserFunctions;
- std::string m_layerName;
using NameToBindingInfo = std::pair<std::string, BindingPointInfo >;
std::vector<NameToBindingInfo> m_InputBindings;
diff --git a/src/armnnSerializer/test/ActivationSerializationTests.cpp b/src/armnnSerializer/test/ActivationSerializationTests.cpp
index c20f2864f9..2a46045f59 100644
--- a/src/armnnSerializer/test/ActivationSerializationTests.cpp
+++ b/src/armnnSerializer/test/ActivationSerializationTests.cpp
@@ -12,6 +12,17 @@
BOOST_AUTO_TEST_SUITE(SerializerTests)
+class VerifyActivationName : public armnn::LayerVisitorBase<armnn::VisitorNoThrowPolicy>
+{
+public:
+ void VisitActivationLayer(const armnn::IConnectableLayer* layer,
+ const armnn::ActivationDescriptor& activationDescriptor,
+ const char* name) override
+ {
+ BOOST_TEST(name == "activation");
+ }
+};
+
BOOST_AUTO_TEST_CASE(ActivationSerialization)
{
armnnDeserializer::IDeserializerPtr parser = armnnDeserializer::IDeserializer::Create();
@@ -48,6 +59,9 @@ BOOST_AUTO_TEST_CASE(ActivationSerialization)
armnn::INetworkPtr deserializedNetwork = parser->CreateNetworkFromBinary(serializerVector);
+ VerifyActivationName visitor;
+ deserializedNetwork->Accept(visitor);
+
armnn::IRuntime::CreationOptions options; // default options
armnn::IRuntimePtr run = armnn::IRuntime::Create(options);
auto deserializedOptimized = Optimize(*deserializedNetwork, { armnn::Compute::CpuRef }, run->GetDeviceSpec());
diff --git a/src/armnnSerializer/test/SerializerTests.cpp b/src/armnnSerializer/test/SerializerTests.cpp
index ede24baf9e..bb050520a4 100644
--- a/src/armnnSerializer/test/SerializerTests.cpp
+++ b/src/armnnSerializer/test/SerializerTests.cpp
@@ -134,22 +134,31 @@ BOOST_AUTO_TEST_SUITE(SerializerTests)
BOOST_AUTO_TEST_CASE(SerializeAddition)
{
+ class VerifyAdditionName : public armnn::LayerVisitorBase<armnn::VisitorNoThrowPolicy>
+ {
+ public:
+ void VisitAdditionLayer(const armnn::IConnectableLayer*, const char* name) override
+ {
+ BOOST_TEST(name == "addition");
+ }
+ };
+
armnn::INetworkPtr network = armnn::INetwork::Create();
armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
- armnn::IConnectableLayer* const additionLayer0 = network->AddAdditionLayer();
- inputLayer0->GetOutputSlot(0).Connect(additionLayer0->GetInputSlot(0));
- inputLayer1->GetOutputSlot(0).Connect(additionLayer0->GetInputSlot(1));
+ armnn::IConnectableLayer* const additionLayer = network->AddAdditionLayer("addition");
+ inputLayer0->GetOutputSlot(0).Connect(additionLayer->GetInputSlot(0));
+ inputLayer1->GetOutputSlot(0).Connect(additionLayer->GetInputSlot(1));
- armnn::IConnectableLayer* const outputLayer0 = network->AddOutputLayer(0);
- additionLayer0->GetOutputSlot(0).Connect(outputLayer0->GetInputSlot(0));
+ armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
+ additionLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
armnn::TensorShape shape{1U};
armnn::TensorInfo info(shape, armnn::DataType::Float32);
inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
- additionLayer0->GetOutputSlot(0).SetTensorInfo(info);
+ additionLayer->GetOutputSlot(0).SetTensorInfo(info);
armnnSerializer::Serializer serializer;
serializer.Serialize(*network);
@@ -157,28 +166,43 @@ BOOST_AUTO_TEST_CASE(SerializeAddition)
std::stringstream stream;
serializer.SaveSerializedToStream(stream);
BOOST_TEST(stream.str().length() > 0);
+
+ armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(stream.str());
+ BOOST_CHECK(deserializedNetwork);
+
+ VerifyAdditionName nameChecker;
+ deserializedNetwork->Accept(nameChecker);
}
BOOST_AUTO_TEST_CASE(SerializeMultiplication)
{
+ class VerifyMultiplicationName : public armnn::LayerVisitorBase<armnn::VisitorNoThrowPolicy>
+ {
+ public:
+ void VisitMultiplicationLayer(const armnn::IConnectableLayer*, const char* name) override
+ {
+ BOOST_TEST(name == "multiplication");
+ }
+ };
+
const armnn::TensorInfo info({ 1, 5, 2, 3 }, armnn::DataType::Float32);
armnn::INetworkPtr network = armnn::INetwork::Create();
armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
armnn::IConnectableLayer* const inputLayer1 = network->AddInputLayer(1);
- const char* multLayerName = "mult_0";
+ const char* multLayerName = "multiplication";
- armnn::IConnectableLayer* const multiplicationLayer0 = network->AddMultiplicationLayer(multLayerName);
- inputLayer0->GetOutputSlot(0).Connect(multiplicationLayer0->GetInputSlot(0));
- inputLayer1->GetOutputSlot(0).Connect(multiplicationLayer0->GetInputSlot(1));
+ armnn::IConnectableLayer* const multiplicationLayer = network->AddMultiplicationLayer(multLayerName);
+ inputLayer0->GetOutputSlot(0).Connect(multiplicationLayer->GetInputSlot(0));
+ inputLayer1->GetOutputSlot(0).Connect(multiplicationLayer->GetInputSlot(1));
- armnn::IConnectableLayer* const outputLayer0 = network->AddOutputLayer(0);
- multiplicationLayer0->GetOutputSlot(0).Connect(outputLayer0->GetInputSlot(0));
+ armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
+ multiplicationLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
- multiplicationLayer0->GetOutputSlot(0).SetTensorInfo(info);
+ multiplicationLayer->GetOutputSlot(0).SetTensorInfo(info);
armnnSerializer::Serializer serializer;
serializer.Serialize(*network);
@@ -187,10 +211,30 @@ BOOST_AUTO_TEST_CASE(SerializeMultiplication)
serializer.SaveSerializedToStream(stream);
BOOST_TEST(stream.str().length() > 0);
BOOST_TEST(stream.str().find(multLayerName) != stream.str().npos);
+
+ armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(stream.str());
+ BOOST_CHECK(deserializedNetwork);
+
+ VerifyMultiplicationName nameChecker;
+ deserializedNetwork->Accept(nameChecker);
}
BOOST_AUTO_TEST_CASE(SerializeDeserializeConvolution2d)
{
+
+ class VerifyConvolution2dName : public armnn::LayerVisitorBase<armnn::VisitorNoThrowPolicy>
+ {
+ public:
+ void VisitConvolution2dLayer(const armnn::IConnectableLayer*,
+ const armnn::Convolution2dDescriptor&,
+ const armnn::ConstTensor&,
+ const armnn::Optional<armnn::ConstTensor>&,
+ const char* name) override
+ {
+ BOOST_TEST(name == "convolution");
+ }
+ };
+
armnn::TensorInfo inputInfo ({ 1, 5, 5, 1 }, armnn::DataType::Float32);
armnn::TensorInfo outputInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32);
@@ -230,6 +274,9 @@ BOOST_AUTO_TEST_CASE(SerializeDeserializeConvolution2d)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
+ VerifyConvolution2dName nameChecker;
+ deserializedNetwork->Accept(nameChecker);
+
CheckDeserializedNetworkAgainstOriginal(*network,
*deserializedNetwork,
inputInfo.GetShape(),
@@ -238,6 +285,15 @@ BOOST_AUTO_TEST_CASE(SerializeDeserializeConvolution2d)
BOOST_AUTO_TEST_CASE(SerializeDeserializeReshape)
{
+ class VerifyReshapeName : public armnn::LayerVisitorBase<armnn::VisitorNoThrowPolicy>
+ {
+ public:
+ void VisitReshapeLayer(const armnn::IConnectableLayer*, const armnn::ReshapeDescriptor&, const char* name)
+ {
+ BOOST_TEST(name == "reshape");
+ }
+ };
+
unsigned int inputShape[] = { 1, 9 };
unsigned int outputShape[] = { 3, 3 };
@@ -249,9 +305,9 @@ BOOST_AUTO_TEST_CASE(SerializeDeserializeReshape)
reshapeDescriptor.m_TargetShape = reshapeOutputTensorInfo.GetShape();
armnn::INetworkPtr network = armnn::INetwork::Create();
- armnn::IConnectableLayer *const inputLayer = network->AddInputLayer(0);
- armnn::IConnectableLayer *const reshapeLayer = network->AddReshapeLayer(reshapeDescriptor, "ReshapeLayer");
- armnn::IConnectableLayer *const outputLayer = network->AddOutputLayer(0);
+ armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
+ armnn::IConnectableLayer* const reshapeLayer = network->AddReshapeLayer(reshapeDescriptor, "reshape");
+ armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
inputLayer->GetOutputSlot(0).Connect(reshapeLayer->GetInputSlot(0));
inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
@@ -261,6 +317,9 @@ BOOST_AUTO_TEST_CASE(SerializeDeserializeReshape)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
+ VerifyReshapeName nameChecker;
+ deserializedNetwork->Accept(nameChecker);
+
CheckDeserializedNetworkAgainstOriginal(*network,
*deserializedNetwork,
inputTensorInfo.GetShape(),
@@ -269,6 +328,19 @@ BOOST_AUTO_TEST_CASE(SerializeDeserializeReshape)
BOOST_AUTO_TEST_CASE(SerializeDeserializeDepthwiseConvolution2d)
{
+ class VerifyDepthwiseConvolution2dName : public armnn::LayerVisitorBase<armnn::VisitorNoThrowPolicy>
+ {
+ public:
+ void VisitDepthwiseConvolution2dLayer(const armnn::IConnectableLayer*,
+ const armnn::DepthwiseConvolution2dDescriptor&,
+ const armnn::ConstTensor&,
+ const armnn::Optional<armnn::ConstTensor>&,
+ const char* name) override
+ {
+ BOOST_TEST(name == "depthwise_convolution");
+ }
+ };
+
armnn::TensorInfo inputInfo ({ 1, 5, 5, 3 }, armnn::DataType::Float32);
armnn::TensorInfo outputInfo({ 1, 3, 3, 3 }, armnn::DataType::Float32);
@@ -290,7 +362,7 @@ BOOST_AUTO_TEST_CASE(SerializeDeserializeDepthwiseConvolution2d)
armnn::INetworkPtr network = armnn::INetwork::Create();
armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
armnn::IConnectableLayer* const depthwiseConvLayer =
- network->AddDepthwiseConvolution2dLayer(descriptor, weights, biases, "depthwiseConv");
+ network->AddDepthwiseConvolution2dLayer(descriptor, weights, biases, "depthwise_convolution");
armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
inputLayer->GetOutputSlot(0).Connect(depthwiseConvLayer->GetInputSlot(0));
@@ -301,6 +373,9 @@ BOOST_AUTO_TEST_CASE(SerializeDeserializeDepthwiseConvolution2d)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
+ VerifyDepthwiseConvolution2dName nameChecker;
+ deserializedNetwork->Accept(nameChecker);
+
CheckDeserializedNetworkAgainstOriginal(*network,
*deserializedNetwork,
inputInfo.GetShape(),
@@ -309,6 +384,15 @@ BOOST_AUTO_TEST_CASE(SerializeDeserializeDepthwiseConvolution2d)
BOOST_AUTO_TEST_CASE(SerializeDeserializeSoftmax)
{
+ class VerifySoftmaxName : public armnn::LayerVisitorBase<armnn::VisitorNoThrowPolicy>
+ {
+ public:
+ void VisitSoftmaxLayer(const armnn::IConnectableLayer*, const armnn::SoftmaxDescriptor&, const char* name)
+ {
+ BOOST_TEST(name == "softmax");
+ }
+ };
+
armnn::TensorInfo tensorInfo({1, 10}, armnn::DataType::Float32);
armnn::SoftmaxDescriptor descriptor;
@@ -327,6 +411,9 @@ BOOST_AUTO_TEST_CASE(SerializeDeserializeSoftmax)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
+ VerifySoftmaxName nameChecker;
+ deserializedNetwork->Accept(nameChecker);
+
CheckDeserializedNetworkAgainstOriginal(*network,
*deserializedNetwork,
tensorInfo.GetShape(),
@@ -335,6 +422,14 @@ BOOST_AUTO_TEST_CASE(SerializeDeserializeSoftmax)
BOOST_AUTO_TEST_CASE(SerializeDeserializePooling2d)
{
+ class VerifyPooling2dName : public armnn::LayerVisitorBase<armnn::VisitorNoThrowPolicy>
+ {
+ void VisitPooling2dLayer(const armnn::IConnectableLayer*, const armnn::Pooling2dDescriptor&, const char* name)
+ {
+ BOOST_TEST(name == "pooling2d");
+ }
+ };
+
unsigned int inputShape[] = {1, 2, 2, 1};
unsigned int outputShape[] = {1, 1, 1, 1};
@@ -356,9 +451,9 @@ BOOST_AUTO_TEST_CASE(SerializeDeserializePooling2d)
desc.m_StrideY = 2;
armnn::INetworkPtr network = armnn::INetwork::Create();
- armnn::IConnectableLayer *const inputLayer = network->AddInputLayer(0);
- armnn::IConnectableLayer *const pooling2dLayer = network->AddPooling2dLayer(desc, "ReshapeLayer");
- armnn::IConnectableLayer *const outputLayer = network->AddOutputLayer(0);
+ armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
+ armnn::IConnectableLayer* const pooling2dLayer = network->AddPooling2dLayer(desc, "pooling2d");
+ armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
inputLayer->GetOutputSlot(0).Connect(pooling2dLayer->GetInputSlot(0));
inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
@@ -368,6 +463,9 @@ BOOST_AUTO_TEST_CASE(SerializeDeserializePooling2d)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
+ VerifyPooling2dName nameChecker;
+ deserializedNetwork->Accept(nameChecker);
+
CheckDeserializedNetworkAgainstOriginal(*network,
*deserializedNetwork,
inputInfo.GetShape(),
@@ -376,6 +474,15 @@ BOOST_AUTO_TEST_CASE(SerializeDeserializePooling2d)
BOOST_AUTO_TEST_CASE(SerializeDeserializePermute)
{
+ class VerifyPermuteName : public armnn::LayerVisitorBase<armnn::VisitorNoThrowPolicy>
+ {
+ public:
+ void VisitPermuteLayer(const armnn::IConnectableLayer*, const armnn::PermuteDescriptor&, const char* name)
+ {
+ BOOST_TEST(name == "permute");
+ }
+ };
+
unsigned int inputShape[] = { 4, 3, 2, 1 };
unsigned int outputShape[] = { 1, 2, 3, 4 };
unsigned int dimsMapping[] = { 3, 2, 1, 0 };
@@ -386,9 +493,9 @@ BOOST_AUTO_TEST_CASE(SerializeDeserializePermute)
armnn::PermuteDescriptor permuteDescriptor(armnn::PermutationVector(dimsMapping, 4));
armnn::INetworkPtr network = armnn::INetwork::Create();
- armnn::IConnectableLayer *const inputLayer = network->AddInputLayer(0);
- armnn::IConnectableLayer *const permuteLayer = network->AddPermuteLayer(permuteDescriptor, "PermuteLayer");
- armnn::IConnectableLayer *const outputLayer = network->AddOutputLayer(0);
+ armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
+ armnn::IConnectableLayer* const permuteLayer = network->AddPermuteLayer(permuteDescriptor, "permute");
+ armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
inputLayer->GetOutputSlot(0).Connect(permuteLayer->GetInputSlot(0));
inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
@@ -398,6 +505,9 @@ BOOST_AUTO_TEST_CASE(SerializeDeserializePermute)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
+ VerifyPermuteName nameChecker;
+ deserializedNetwork->Accept(nameChecker);
+
CheckDeserializedNetworkAgainstOriginal(*network,
*deserializedNetwork,
inputTensorInfo.GetShape(),
@@ -406,6 +516,19 @@ BOOST_AUTO_TEST_CASE(SerializeDeserializePermute)
BOOST_AUTO_TEST_CASE(SerializeDeserializeFullyConnected)
{
+ class VerifyFullyConnectedName : public armnn::LayerVisitorBase<armnn::VisitorNoThrowPolicy>
+ {
+ public:
+ void VisitFullyConnectedLayer(const armnn::IConnectableLayer*,
+ const armnn::FullyConnectedDescriptor&,
+ const armnn::ConstTensor&,
+ const armnn::Optional<armnn::ConstTensor>&,
+ const char* name) override
+ {
+ BOOST_TEST(name == "fully_connected");
+ }
+ };
+
armnn::TensorInfo inputInfo ({ 2, 5, 1, 1 }, armnn::DataType::Float32);
armnn::TensorInfo outputInfo({ 2, 3 }, armnn::DataType::Float32);
@@ -439,6 +562,9 @@ BOOST_AUTO_TEST_CASE(SerializeDeserializeFullyConnected)
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
BOOST_CHECK(deserializedNetwork);
+ VerifyFullyConnectedName nameChecker;
+ deserializedNetwork->Accept(nameChecker);
+
CheckDeserializedNetworkAgainstOriginal(*network,
*deserializedNetwork,
inputInfo.GetShape(),