path: root/src/armnnSerializer
author     Jim Flynn <jim.flynn@arm.com>    2019-02-28 10:40:49 +0000
committer  Jim Flynn <jim.flynn@arm.com>    2019-03-07 15:43:47 +0000
commit     ac25a1beda8da71a82c0cf2795e2a6eaaeaa26b1 (patch)
tree       2f186eac90aa72f224bd0eccaee3cffa83dea164 /src/armnnSerializer
parent     d469faf863f4ecd3ba56f27e51884ef0dfeac7bf (diff)
download   armnn-ac25a1beda8da71a82c0cf2795e2a6eaaeaa26b1.tar.gz
IVGCVSW-2697 Add Serialize/Deserialize for the Merger Layer
* Force generation of schema header in every build
* Also fixed typo in OriginsDescriptor comment (Descriptors.hpp)
* Added Serialize/Deserialize check on Addition Layer
* Added Serialize/Deserialize check on Floor Layer
* Added Serialize/Deserialize check on Minimum Layer
* Added Serialize/Deserialize check on Maximum Layer
* Added Serialize/Deserialize check on Multiplication Layer
* Added Serialize/Deserialize check on Division Layer

Change-Id: I1358ea4db7ca506d8bcec2ee64e1fbad6005e723
Signed-off-by: Jim Flynn <jim.flynn@arm.com>
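For reference, a minimal round-trip sketch of how a caller would exercise the new support once a network containing a Merger layer has been built. It assumes the armnnSerializer::ISerializer interface (Create, Serialize, SaveSerializedToStream) and the armnnDeserializer::IDeserializer::CreateNetworkFromBinary overload that takes a byte vector; the RoundTrip helper name is invented for illustration and is not part of this change.

#include <armnn/INetwork.hpp>
#include <armnnSerializer/ISerializer.hpp>
#include <armnnDeserializer/IDeserializer.hpp>

#include <cstdint>
#include <sstream>
#include <string>
#include <vector>

// Hypothetical helper: serialize a finished INetwork to a FlatBuffers blob
// and parse it back, mirroring what the SerializerTests below do.
armnn::INetworkPtr RoundTrip(const armnn::INetwork& network)
{
    auto serializer = armnnSerializer::ISerializer::Create();
    serializer->Serialize(network);

    std::stringstream stream;
    serializer->SaveSerializedToStream(stream);

    const std::string data = stream.str();
    std::vector<uint8_t> binary(data.begin(), data.end());

    auto deserializer = armnnDeserializer::IDeserializer::Create();
    return deserializer->CreateNetworkFromBinary(binary);
}

The new SerializeDeserializeMerger test case below builds exactly such a network (two inputs feeding one Merger layer) and verifies the round trip with CheckDeserializedNetworkAgainstOriginal.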
Diffstat (limited to 'src/armnnSerializer')
-rw-r--r--  src/armnnSerializer/ArmnnSchema.fbs            22
-rw-r--r--  src/armnnSerializer/Serializer.cpp             33
-rw-r--r--  src/armnnSerializer/Serializer.hpp              4
-rw-r--r--  src/armnnSerializer/SerializerSupport.md        1
-rw-r--r--  src/armnnSerializer/test/SerializerTests.cpp  105
5 files changed, 155 insertions, 10 deletions
diff --git a/src/armnnSerializer/ArmnnSchema.fbs b/src/armnnSerializer/ArmnnSchema.fbs
index 01142ff391..a5fb4b6697 100644
--- a/src/armnnSerializer/ArmnnSchema.fbs
+++ b/src/armnnSerializer/ArmnnSchema.fbs
@@ -110,7 +110,8 @@ enum LayerType : uint {
Subtraction = 26,
StridedSlice = 27,
Gather = 28,
- Mean = 29
+ Mean = 29,
+ Merger = 30
}
// Base layer table to be used as part of other layers
@@ -415,6 +416,22 @@ table StridedSliceDescriptor {
dataLayout:DataLayout;
}
+table MergerLayer {
+ base:LayerBase;
+ descriptor:OriginsDescriptor;
+}
+
+table UintVector {
+ data:[uint];
+}
+
+table OriginsDescriptor {
+ concatAxis:uint;
+ numViews:uint;
+ numDimensions:uint;
+ viewOrigins:[UintVector];
+}
+
union Layer {
ActivationLayer,
AdditionLayer,
@@ -445,7 +462,8 @@ union Layer {
SubtractionLayer,
StridedSliceLayer,
GatherLayer,
- MeanLayer
+ MeanLayer,
+ MergerLayer
}
table AnyLayer {
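To make the new OriginsDescriptor and UintVector tables concrete: concatenating two [2, 3, 2, 2] tensors along axis 0 (the case used by the new test below) yields numViews = 2, numDimensions = 4, and view origins {0, 0, 0, 0} and {2, 0, 0, 0}; these per-view offsets are what the serializer writes into viewOrigins. A rough sketch that prints those values, using only the armnn::Descriptors.hpp helpers already exercised elsewhere in this diff:

#include <armnn/Descriptors.hpp>
#include <armnn/Tensor.hpp>

#include <cstdint>
#include <iostream>
#include <vector>

int main()
{
    const armnn::TensorShape inputShape({2, 3, 2, 2});
    std::vector<armnn::TensorShape> shapes = { inputShape, inputShape };

    // Concatenate the two views along dimension 0.
    armnn::MergerDescriptor descriptor =
        armnn::CreateMergerDescriptorForConcatenation(shapes.begin(), shapes.end(), 0);

    // The same values VisitMergerLayer copies into the FlatBuffers tables.
    std::cout << "concatAxis    = " << descriptor.GetConcatAxis()    << "\n";
    std::cout << "numViews      = " << descriptor.GetNumViews()      << "\n";
    std::cout << "numDimensions = " << descriptor.GetNumDimensions() << "\n";

    for (unsigned int v = 0; v < descriptor.GetNumViews(); ++v)
    {
        const uint32_t* origin = descriptor.GetViewOrigin(v);
        std::cout << "view " << v << " origin = { ";
        for (unsigned int d = 0; d < descriptor.GetNumDimensions(); ++d)
        {
            std::cout << origin[d] << " ";
        }
        std::cout << "}\n";
    }
    return 0;
}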
diff --git a/src/armnnSerializer/Serializer.cpp b/src/armnnSerializer/Serializer.cpp
index 56c4281a87..3b71e5fc7d 100644
--- a/src/armnnSerializer/Serializer.cpp
+++ b/src/armnnSerializer/Serializer.cpp
@@ -361,6 +361,39 @@ void SerializerVisitor::VisitMinimumLayer(const armnn::IConnectableLayer* layer,
CreateAnyLayer(fbMinimumLayer.o, serializer::Layer::Layer_MinimumLayer);
}
+void SerializerVisitor::VisitMergerLayer(const armnn::IConnectableLayer* layer,
+ const armnn::OriginsDescriptor& mergerDescriptor,
+ const char* name)
+{
+ auto flatBufferMergerBaseLayer = CreateLayerBase(layer, serializer::LayerType::LayerType_Merger);
+
+ std::vector<flatbuffers::Offset<UintVector>> views;
+ for (unsigned int v = 0; v < mergerDescriptor.GetNumViews(); ++v)
+ {
+ const uint32_t* origin = mergerDescriptor.GetViewOrigin(v);
+ std::vector<uint32_t> origins;
+ for (unsigned int d = 0; d < mergerDescriptor.GetNumDimensions(); ++d)
+ {
+ origins.push_back(origin[d]);
+ }
+ auto view = m_flatBufferBuilder.CreateVector(origins);
+ auto uintVector = CreateUintVector(m_flatBufferBuilder, view);
+ views.push_back(uintVector);
+ }
+
+ auto flatBufferMergerDescriptor = CreateOriginsDescriptor(m_flatBufferBuilder,
+ mergerDescriptor.GetConcatAxis(),
+ mergerDescriptor.GetNumViews(),
+ mergerDescriptor.GetNumDimensions(),
+ m_flatBufferBuilder.CreateVector(views));
+
+ auto flatBufferLayer = CreateMergerLayer(m_flatBufferBuilder,
+ flatBufferMergerBaseLayer,
+ flatBufferMergerDescriptor);
+
+ CreateAnyLayer(flatBufferLayer.o, serializer::Layer::Layer_MergerLayer);
+}
+
void SerializerVisitor::VisitMultiplicationLayer(const armnn::IConnectableLayer* layer, const char* name)
{
auto fbMultiplicationBaseLayer = CreateLayerBase(layer, serializer::LayerType::LayerType_Multiplication);
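The deserialization side (in armnnDeserializer, outside this diff) has to invert the loop above. A rough sketch of that inverse, assuming the flatc-generated C++ accessors for the tables added to ArmnnSchema.fbs (concatAxis(), numViews(), numDimensions(), viewOrigins(), data()) and the existing armnn::OriginsDescriptor setters; the helper name and the exact shape of the real Deserializer code may differ:

// Hypothetical sketch: rebuild an armnn::OriginsDescriptor from the
// serializer::OriginsDescriptor table defined in ArmnnSchema.fbs.
armnn::OriginsDescriptor ToOriginsDescriptor(const serializer::OriginsDescriptor* origins)
{
    armnn::OriginsDescriptor descriptor(origins->numViews(), origins->numDimensions());
    descriptor.SetConcatAxis(origins->concatAxis());

    for (unsigned int v = 0; v < origins->numViews(); ++v)
    {
        // Each viewOrigins entry is a UintVector holding one coordinate per dimension.
        const auto* origin = origins->viewOrigins()->Get(v)->data();
        for (unsigned int d = 0; d < origins->numDimensions(); ++d)
        {
            descriptor.SetViewOriginCoord(v, d, origin->Get(d));
        }
    }
    return descriptor;
}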
diff --git a/src/armnnSerializer/Serializer.hpp b/src/armnnSerializer/Serializer.hpp
index f928c37d30..e93e4cead6 100644
--- a/src/armnnSerializer/Serializer.hpp
+++ b/src/armnnSerializer/Serializer.hpp
@@ -112,6 +112,10 @@ public:
void VisitMaximumLayer(const armnn::IConnectableLayer* layer,
const char* name = nullptr) override;
+ void VisitMergerLayer(const armnn::IConnectableLayer* layer,
+ const armnn::OriginsDescriptor& mergerDescriptor,
+ const char* name = nullptr) override;
+
void VisitMultiplicationLayer(const armnn::IConnectableLayer* layer,
const char* name = nullptr) override;
diff --git a/src/armnnSerializer/SerializerSupport.md b/src/armnnSerializer/SerializerSupport.md
index bb50242e90..ae8691ed04 100644
--- a/src/armnnSerializer/SerializerSupport.md
+++ b/src/armnnSerializer/SerializerSupport.md
@@ -21,6 +21,7 @@ The Arm NN SDK Serializer currently supports the following layers:
* Greater
* Maximum
* Mean
+* Merger
* Minimum
* Multiplication
* Normalization
diff --git a/src/armnnSerializer/test/SerializerTests.cpp b/src/armnnSerializer/test/SerializerTests.cpp
index a18ae32a03..5a054c210c 100644
--- a/src/armnnSerializer/test/SerializerTests.cpp
+++ b/src/armnnSerializer/test/SerializerTests.cpp
@@ -148,7 +148,7 @@ void CheckDeserializedNetworkAgainstOriginal(const armnn::INetwork& deserialized
BOOST_AUTO_TEST_SUITE(SerializerTests)
-BOOST_AUTO_TEST_CASE(SerializeAddition)
+BOOST_AUTO_TEST_CASE(SerializeDeserializeAddition)
{
class VerifyAdditionName : public armnn::LayerVisitorBase<armnn::VisitorNoThrowPolicy>
{
@@ -188,6 +188,12 @@ BOOST_AUTO_TEST_CASE(SerializeAddition)
VerifyAdditionName nameChecker;
deserializedNetwork->Accept(nameChecker);
+
+ CheckDeserializedNetworkAgainstOriginal<float>(*deserializedNetwork,
+ *network,
+ {info.GetShape(), info.GetShape()},
+ {info.GetShape()},
+ {0, 1});
}
BOOST_AUTO_TEST_CASE(SerializeConstant)
@@ -255,7 +261,7 @@ BOOST_AUTO_TEST_CASE(SerializeDeserializeConstant)
{commonTensorInfo.GetShape()});
}
-BOOST_AUTO_TEST_CASE(SerializeFloor)
+BOOST_AUTO_TEST_CASE(SerializeDeserializeFloor)
{
class VerifyFloorName : public armnn::LayerVisitorBase<armnn::VisitorNoThrowPolicy>
{
@@ -269,7 +275,7 @@ BOOST_AUTO_TEST_CASE(SerializeFloor)
const armnn::TensorInfo info({4,4}, armnn::DataType::Float32);
armnn::INetworkPtr network = armnn::INetwork::Create();
- armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(1);
+ armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
const char* floorLayerName = "floor";
@@ -295,9 +301,14 @@ BOOST_AUTO_TEST_CASE(SerializeFloor)
VerifyFloorName nameChecker;
deserializedNetwork->Accept(nameChecker);
+
+ CheckDeserializedNetworkAgainstOriginal<float>(*deserializedNetwork,
+ *network,
+ {info.GetShape()},
+ {info.GetShape()});
}
-BOOST_AUTO_TEST_CASE(SerializeMinimum)
+BOOST_AUTO_TEST_CASE(SerializeDeserializeMinimum)
{
class VerifyMinimumName : public armnn::LayerVisitorBase<armnn::VisitorNoThrowPolicy>
{
@@ -346,9 +357,15 @@ BOOST_AUTO_TEST_CASE(SerializeMinimum)
VerifyMinimumName nameChecker(minimumLayerName);
deserializedNetwork->Accept(nameChecker);
+
+ CheckDeserializedNetworkAgainstOriginal<float>(*deserializedNetwork,
+ *network,
+ {info.GetShape(), info.GetShape()},
+ {info.GetShape()},
+ {0, 1});
}
-BOOST_AUTO_TEST_CASE(SerializeMaximum)
+BOOST_AUTO_TEST_CASE(SerializeDeserializeMaximum)
{
class VerifyMaximumName : public armnn::LayerVisitorBase<armnn::VisitorNoThrowPolicy>
{
@@ -397,9 +414,15 @@ BOOST_AUTO_TEST_CASE(SerializeMaximum)
VerifyMaximumName nameChecker(maximumLayerName);
deserializedNetwork->Accept(nameChecker);
+
+ CheckDeserializedNetworkAgainstOriginal<float>(*deserializedNetwork,
+ *network,
+ {info.GetShape(), info.GetShape()},
+ {info.GetShape()},
+ {0, 1});
}
-BOOST_AUTO_TEST_CASE(SerializeMultiplication)
+BOOST_AUTO_TEST_CASE(SerializeDeserializeMultiplication)
{
class VerifyMultiplicationName : public armnn::LayerVisitorBase<armnn::VisitorNoThrowPolicy>
{
@@ -442,6 +465,12 @@ BOOST_AUTO_TEST_CASE(SerializeMultiplication)
VerifyMultiplicationName nameChecker;
deserializedNetwork->Accept(nameChecker);
+
+ CheckDeserializedNetworkAgainstOriginal<float>(*deserializedNetwork,
+ *network,
+ {info.GetShape(), info.GetShape()},
+ {info.GetShape()},
+ {0, 1});
}
BOOST_AUTO_TEST_CASE(SerializeDeserializeConvolution2d)
@@ -998,7 +1027,7 @@ BOOST_AUTO_TEST_CASE(SerializeDeserializeBatchNormalization)
{outputInfo.GetShape()});
}
-BOOST_AUTO_TEST_CASE(SerializeDivision)
+BOOST_AUTO_TEST_CASE(SerializeDeserializeDivision)
{
class VerifyDivisionName : public armnn::LayerVisitorBase<armnn::VisitorNoThrowPolicy>
{
@@ -1041,6 +1070,12 @@ BOOST_AUTO_TEST_CASE(SerializeDivision)
VerifyDivisionName nameChecker;
deserializedNetwork->Accept(nameChecker);
+
+ CheckDeserializedNetworkAgainstOriginal<float>(*deserializedNetwork,
+ *network,
+ {info.GetShape(), info.GetShape()},
+ {info.GetShape()},
+ {0, 1});
}
BOOST_AUTO_TEST_CASE(SerializeDeserializeNormalization)
@@ -1173,7 +1208,7 @@ BOOST_AUTO_TEST_CASE(SerializeDeserializePad)
{outputTensorInfo.GetShape()});
}
-BOOST_AUTO_TEST_CASE(SerializeRsqrt)
+BOOST_AUTO_TEST_CASE(SerializeDeserializeRsqrt)
{
class VerifyRsqrtName : public armnn::LayerVisitorBase<armnn::VisitorNoThrowPolicy>
{
@@ -1374,4 +1409,58 @@ BOOST_AUTO_TEST_CASE(SerializeDeserializeMean)
{outputTensorInfo.GetShape()});
}
+BOOST_AUTO_TEST_CASE(SerializeDeserializeMerger)
+{
+ class VerifyMergerName : public armnn::LayerVisitorBase<armnn::VisitorNoThrowPolicy>
+ {
+ public:
+ void VisitMergerLayer(const armnn::IConnectableLayer* layer,
+ const armnn::OriginsDescriptor& mergerDescriptor,
+ const char* name = nullptr) override
+ {
+ BOOST_TEST(name == "MergerLayer");
+ }
+ };
+
+ unsigned int inputShapeOne[] = {2, 3, 2, 2};
+ unsigned int inputShapeTwo[] = {2, 3, 2, 2};
+ unsigned int outputShape[] = {4, 3, 2, 2};
+
+ const armnn::TensorInfo inputOneTensorInfo = armnn::TensorInfo(4, inputShapeOne, armnn::DataType::Float32);
+ const armnn::TensorInfo inputTwoTensorInfo = armnn::TensorInfo(4, inputShapeTwo, armnn::DataType::Float32);
+ const armnn::TensorInfo outputTensorInfo = armnn::TensorInfo(4, outputShape, armnn::DataType::Float32);
+
+ std::vector<armnn::TensorShape> shapes;
+ shapes.push_back(inputOneTensorInfo.GetShape());
+ shapes.push_back(inputTwoTensorInfo.GetShape());
+
+ armnn::MergerDescriptor descriptor =
+ armnn::CreateMergerDescriptorForConcatenation(shapes.begin(), shapes.end(), 0);
+
+ armnn::INetworkPtr network = armnn::INetwork::Create();
+ armnn::IConnectableLayer* const inputLayerOne = network->AddInputLayer(0);
+ inputLayerOne->GetOutputSlot(0).SetTensorInfo(inputOneTensorInfo);
+ armnn::IConnectableLayer* const inputLayerTwo = network->AddInputLayer(1);
+ inputLayerTwo->GetOutputSlot(0).SetTensorInfo(inputTwoTensorInfo);
+ armnn::IConnectableLayer* const mergerLayer = network->AddMergerLayer(descriptor, "MergerLayer");
+ mergerLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
+ armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
+
+ inputLayerOne->GetOutputSlot(0).Connect(mergerLayer->GetInputSlot(0));
+ inputLayerTwo->GetOutputSlot(0).Connect(mergerLayer->GetInputSlot(1));
+ mergerLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
+
+ armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
+ BOOST_CHECK(deserializedNetwork);
+
+ VerifyMergerName nameChecker;
+ deserializedNetwork->Accept(nameChecker);
+
+ CheckDeserializedNetworkAgainstOriginal<float>(*deserializedNetwork,
+ *network,
+ {inputOneTensorInfo.GetShape(), inputTwoTensorInfo.GetShape()},
+ {outputTensorInfo.GetShape()},
+ {0, 1});
+}
+
BOOST_AUTO_TEST_SUITE_END()