about summary refs log tree commit diff
path: root/src/armnnSerializer
diff options
context:
space:
mode:
Diffstat (limited to 'src/armnnSerializer')
-rw-r--r--  src/armnnSerializer/ArmnnSchema.fbs           | 11
-rw-r--r--  src/armnnSerializer/Serializer.cpp            | 23
-rw-r--r--  src/armnnSerializer/Serializer.hpp            |  3
-rw-r--r--  src/armnnSerializer/SerializerSupport.md      |  1
-rw-r--r--  src/armnnSerializer/test/SerializerTests.cpp  | 26
5 files changed, 58 insertions(+), 6 deletions(-)
diff --git a/src/armnnSerializer/ArmnnSchema.fbs b/src/armnnSerializer/ArmnnSchema.fbs
index 6a388db699..e1b6e1f768 100644
--- a/src/armnnSerializer/ArmnnSchema.fbs
+++ b/src/armnnSerializer/ArmnnSchema.fbs
@@ -59,6 +59,7 @@ table TensorInfo {
quantizationOffset:int = 0;
quantizationScales:[float];
quantizationDim:uint;
+ dimensionality:uint = 1;
}
struct Connection {
@@ -157,7 +158,8 @@ enum LayerType : uint {
ElementwiseUnary = 54,
Transpose = 55,
QLstm = 56,
- Fill = 57
+ Fill = 57,
+ Rank = 58
}
// Base layer table to be used as part of other layers
@@ -859,6 +861,10 @@ table StandInLayer {
descriptor:StandInDescriptor;
}
+table RankLayer {
+ base:LayerBase;
+}
+
union Layer {
ActivationLayer,
AdditionLayer,
@@ -917,7 +923,8 @@ union Layer {
ElementwiseUnaryLayer,
TransposeLayer,
QLstmLayer,
- FillLayer
+ FillLayer,
+ RankLayer
}
table AnyLayer {
diff --git a/src/armnnSerializer/Serializer.cpp b/src/armnnSerializer/Serializer.cpp
index 6555a34be7..8a1f7716a1 100644
--- a/src/armnnSerializer/Serializer.cpp
+++ b/src/armnnSerializer/Serializer.cpp
@@ -2,7 +2,6 @@
// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//
-
#include "Serializer.hpp"
#include <armnn/Descriptors.hpp>
@@ -851,6 +850,16 @@ void SerializerVisitor::VisitPermuteLayer(const armnn::IConnectableLayer* layer,
CreateAnyLayer(flatBufferPermuteLayer.o, serializer::Layer::Layer_PermuteLayer);
}
+// Build FlatBuffer for Rank Layer
+void SerializerVisitor::VisitRankLayer(const armnn::IConnectableLayer* layer,
+ const char* name)
+{
+ IgnoreUnused(name);
+ auto flatBufferBaseLayer = CreateLayerBase(layer, serializer::LayerType::LayerType_Rank);
+ auto flatBufferRankLayer = serializer::CreateRankLayer(m_flatBufferBuilder, flatBufferBaseLayer);
+
+ CreateAnyLayer(flatBufferRankLayer.o, serializer::Layer::Layer_RankLayer);
+}
// Build FlatBuffer for Reshape Layer
void SerializerVisitor::VisitReshapeLayer(const armnn::IConnectableLayer* layer,
const armnn::ReshapeDescriptor& reshapeDescriptor,
@@ -1584,7 +1593,9 @@ flatbuffers::Offset<TensorInfo> SerializerVisitor::CreateTensorInfo(const armnn
tensorInfo.GetQuantizationScales()[0],
tensorInfo.GetQuantizationOffset(),
m_flatBufferBuilder.CreateVector(tensorInfo.GetQuantizationScales()),
- tensorInfo.GetQuantizationDim().value());
+ tensorInfo.GetQuantizationDim().value(),
+ static_cast<unsigned int>
+ (tensorInfo.GetShape().GetDimensionality()));
return flatBufferTensorInfo;
}
@@ -1593,7 +1604,11 @@ flatbuffers::Offset<TensorInfo> SerializerVisitor::CreateTensorInfo(const armnn
m_flatBufferBuilder.CreateVector(shape),
GetFlatBufferDataType(tensorInfo.GetDataType()),
tensorInfo.GetQuantizationScale(),
- tensorInfo.GetQuantizationOffset());
+ tensorInfo.GetQuantizationOffset(),
+ 0,
+ 0,
+ static_cast<unsigned int>
+ (tensorInfo.GetShape().GetDimensionality()));
return flatBufferTensorInfo;
}
@@ -1742,4 +1757,4 @@ bool Serializer::SaveSerializedToStream(std::ostream& stream)
return !stream.bad();
}
-} // namespace armnnSerializer \ No newline at end of file
+} // namespace armnnSerializer
diff --git a/src/armnnSerializer/Serializer.hpp b/src/armnnSerializer/Serializer.hpp
index e4104dda8e..babecdc056 100644
--- a/src/armnnSerializer/Serializer.hpp
+++ b/src/armnnSerializer/Serializer.hpp
@@ -219,6 +219,9 @@ public:
const armnn::QuantizedLstmInputParams& params,
const char* name = nullptr) override;
+ void VisitRankLayer(const armnn::IConnectableLayer* layer,
+ const char* name = nullptr) override;
+
void VisitReshapeLayer(const armnn::IConnectableLayer* layer,
const armnn::ReshapeDescriptor& reshapeDescriptor,
const char* name = nullptr) override;
diff --git a/src/armnnSerializer/SerializerSupport.md b/src/armnnSerializer/SerializerSupport.md
index 4f7868bee7..438335341a 100644
--- a/src/armnnSerializer/SerializerSupport.md
+++ b/src/armnnSerializer/SerializerSupport.md
@@ -44,6 +44,7 @@ The Arm NN SDK Serializer currently supports the following layers:
* QLstm
* Quantize
* QuantizedLstm
+* Rank
* Reshape
* Resize
* Slice
diff --git a/src/armnnSerializer/test/SerializerTests.cpp b/src/armnnSerializer/test/SerializerTests.cpp
index 088282a18a..e059511fc2 100644
--- a/src/armnnSerializer/test/SerializerTests.cpp
+++ b/src/armnnSerializer/test/SerializerTests.cpp
@@ -2202,6 +2202,32 @@ BOOST_AUTO_TEST_CASE(SerializeQuantize)
deserializedNetwork->Accept(verifier);
}
+BOOST_AUTO_TEST_CASE(SerializeRank)
+{
+ DECLARE_LAYER_VERIFIER_CLASS(Rank)
+
+ const std::string layerName("rank");
+ const armnn::TensorInfo inputInfo({1, 9}, armnn::DataType::Float32);
+ const armnn::TensorInfo outputInfo({1}, armnn::DataType::Signed32);
+
+ armnn::INetworkPtr network = armnn::INetwork::Create();
+ armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
+ armnn::IConnectableLayer* const rankLayer = network->AddRankLayer(layerName.c_str());
+ armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
+
+ inputLayer->GetOutputSlot(0).Connect(rankLayer->GetInputSlot(0));
+ rankLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
+
+ inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
+ rankLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
+
+ armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
+ BOOST_CHECK(deserializedNetwork);
+
+ RankLayerVerifier verifier(layerName, {inputInfo}, {outputInfo});
+ deserializedNetwork->Accept(verifier);
+}
+
BOOST_AUTO_TEST_CASE(SerializeReshape)
{
DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Reshape)