aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorDerek Lamberti <derek.lamberti@arm.com>2019-03-27 16:51:31 +0000
committerDerek Lamberti <derek.lamberti@arm.com>2019-03-27 16:51:31 +0000
commit87acb2733718526fc49e279386e2d85d60a74c8b (patch)
tree01194e1ab1b9116e5ee8d5f0dc33f9696ab60c35
parenta9cca6aa935b3e290181a05fdb2c5f5557a49c09 (diff)
downloadarmnn-87acb2733718526fc49e279386e2d85d60a74c8b.tar.gz
IVGCVSW-2870 Serialize quantize layer
Change-Id: I2cff85a3cb4d47aa09227a6810812a142f2aedd3
Signed-off-by: Derek Lamberti <derek.lamberti@arm.com>
-rw-r--r--src/armnnDeserializer/Deserializer.cpp22
-rw-r--r--src/armnnDeserializer/Deserializer.hpp1
-rw-r--r--src/armnnSerializer/ArmnnSchema.fbs10
-rw-r--r--src/armnnSerializer/Serializer.cpp8
-rw-r--r--src/armnnSerializer/Serializer.hpp3
-rw-r--r--src/armnnSerializer/test/SerializerTests.cpp36
6 files changed, 78 insertions(+), 2 deletions(-)
diff --git a/src/armnnDeserializer/Deserializer.cpp b/src/armnnDeserializer/Deserializer.cpp
index d64bed7409..ff5bf8ba4e 100644
--- a/src/armnnDeserializer/Deserializer.cpp
+++ b/src/armnnDeserializer/Deserializer.cpp
@@ -211,6 +211,7 @@ m_ParserFunctions(Layer_MAX+1, &Deserializer::ParseUnsupportedLayer)
m_ParserFunctions[Layer_PadLayer] = &Deserializer::ParsePad;
m_ParserFunctions[Layer_PermuteLayer] = &Deserializer::ParsePermute;
m_ParserFunctions[Layer_Pooling2dLayer] = &Deserializer::ParsePooling2d;
+ m_ParserFunctions[Layer_QuantizeLayer] = &Deserializer::ParseQuantize;
m_ParserFunctions[Layer_ReshapeLayer] = &Deserializer::ParseReshape;
m_ParserFunctions[Layer_ResizeBilinearLayer] = &Deserializer::ParseResizeBilinear;
m_ParserFunctions[Layer_RsqrtLayer] = &Deserializer::ParseRsqrt;
@@ -281,6 +282,8 @@ Deserializer::LayerBaseRawPtr Deserializer::GetBaseLayer(const GraphPtr& graphPt
return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
case Layer::Layer_Pooling2dLayer:
return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
+ case Layer::Layer_QuantizeLayer:
+ return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizeLayer()->base();
case Layer::Layer_ReshapeLayer:
return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
case Layer::Layer_ResizeBilinearLayer:
@@ -1495,6 +1498,25 @@ void Deserializer::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
RegisterOutputSlots(graph, layerIndex, layer);
}
+void Deserializer::ParseQuantize(GraphPtr graph, unsigned int layerIndex)
+{
+ CHECK_LAYERS(graph, 0, layerIndex);
+
+ auto inputs = GetInputs(graph, layerIndex);
+ CHECK_VALID_SIZE(inputs.size(), 1);
+
+ auto outputs = GetOutputs(graph, layerIndex);
+ CHECK_VALID_SIZE(outputs.size(), 1);
+ auto outputInfo = ToTensorInfo(outputs[0]);
+
+ auto layerName = GetLayerName(graph, layerIndex);
+ IConnectableLayer* layer = m_Network->AddQuantizeLayer(layerName.c_str());
+ layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
+
+ RegisterInputSlots(graph, layerIndex, layer);
+ RegisterOutputSlots(graph, layerIndex, layer);
+}
+
armnn::TensorInfo Deserializer::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
const std::vector<uint32_t>& targetDimsIn)
{
diff --git a/src/armnnDeserializer/Deserializer.hpp b/src/armnnDeserializer/Deserializer.hpp
index 6454643f98..5d57dfc02d 100644
--- a/src/armnnDeserializer/Deserializer.hpp
+++ b/src/armnnDeserializer/Deserializer.hpp
@@ -103,6 +103,7 @@ private:
void ParsePad(GraphPtr graph, unsigned int layerIndex);
void ParsePermute(GraphPtr graph, unsigned int layerIndex);
void ParsePooling2d(GraphPtr graph, unsigned int layerIndex);
+ void ParseQuantize(GraphPtr graph, unsigned int layerIndex);
void ParseReshape(GraphPtr graph, unsigned int layerIndex);
void ParseResizeBilinear(GraphPtr graph, unsigned int layerIndex);
void ParseRsqrt(GraphPtr graph, unsigned int layerIndex);
diff --git a/src/armnnSerializer/ArmnnSchema.fbs b/src/armnnSerializer/ArmnnSchema.fbs
index 2cceaae031..7ac83598e1 100644
--- a/src/armnnSerializer/ArmnnSchema.fbs
+++ b/src/armnnSerializer/ArmnnSchema.fbs
@@ -116,7 +116,8 @@ enum LayerType : uint {
L2Normalization = 31,
Splitter = 32,
DetectionPostProcess = 33,
- Lstm = 34
+ Lstm = 34,
+ Quantize = 35
}
// Base layer table to be used as part of other layers
@@ -265,6 +266,10 @@ table Pooling2dDescriptor {
dataLayout:DataLayout;
}
+table QuantizeLayer {
+ base:LayerBase;
+}
+
table SoftmaxLayer {
base:LayerBase;
descriptor:SoftmaxDescriptor;
@@ -549,7 +554,8 @@ union Layer {
L2NormalizationLayer,
SplitterLayer,
DetectionPostProcessLayer,
- LstmLayer
+ LstmLayer,
+ QuantizeLayer
}
table AnyLayer {
diff --git a/src/armnnSerializer/Serializer.cpp b/src/armnnSerializer/Serializer.cpp
index 2fd840258e..83777c9849 100644
--- a/src/armnnSerializer/Serializer.cpp
+++ b/src/armnnSerializer/Serializer.cpp
@@ -681,6 +681,14 @@ void SerializerVisitor::VisitPooling2dLayer(const armnn::IConnectableLayer* laye
CreateAnyLayer(fbPooling2dLayer.o, serializer::Layer::Layer_Pooling2dLayer);
}
+void SerializerVisitor::VisitQuantizeLayer(const armnn::IConnectableLayer *layer, const char *name)
+{
+ auto fbQuantizeBaseLayer = CreateLayerBase(layer, serializer::LayerType::LayerType_Quantize);
+ auto fbQuantizeLayer = serializer::CreateQuantizeLayer(m_flatBufferBuilder,
+ fbQuantizeBaseLayer);
+ CreateAnyLayer(fbQuantizeLayer.o, serializer::Layer::Layer_QuantizeLayer);
+}
+
// Build FlatBuffer for FullyConnected Layer
void SerializerVisitor::VisitFullyConnectedLayer(const armnn::IConnectableLayer* layer,
const armnn::FullyConnectedDescriptor& fullyConnectedDescriptor,
diff --git a/src/armnnSerializer/Serializer.hpp b/src/armnnSerializer/Serializer.hpp
index c1a1305ff4..82e19316e9 100644
--- a/src/armnnSerializer/Serializer.hpp
+++ b/src/armnnSerializer/Serializer.hpp
@@ -149,6 +149,9 @@ public:
const armnn::Pooling2dDescriptor& pooling2dDescriptor,
const char* name = nullptr) override;
+ void VisitQuantizeLayer(const armnn::IConnectableLayer* layer,
+ const char* name = nullptr) override;
+
void VisitReshapeLayer(const armnn::IConnectableLayer* layer,
const armnn::ReshapeDescriptor& reshapeDescriptor,
const char* name = nullptr) override;
diff --git a/src/armnnSerializer/test/SerializerTests.cpp b/src/armnnSerializer/test/SerializerTests.cpp
index e3ce6d29d3..0345e53bcb 100644
--- a/src/armnnSerializer/test/SerializerTests.cpp
+++ b/src/armnnSerializer/test/SerializerTests.cpp
@@ -1542,6 +1542,42 @@ BOOST_AUTO_TEST_CASE(SerializePooling2d)
deserializedNetwork->Accept(verifier);
}
+BOOST_AUTO_TEST_CASE(SerializeQuantize)
+{
+ class QuantizeLayerVerifier : public LayerVerifierBase
+ {
+ public:
+ QuantizeLayerVerifier(const std::string& layerName,
+ const std::vector<armnn::TensorInfo>& inputInfos,
+ const std::vector<armnn::TensorInfo>& outputInfos)
+ : LayerVerifierBase(layerName, inputInfos, outputInfos) {}
+
+ void VisitQuantizeLayer(const armnn::IConnectableLayer* layer, const char* name) override
+ {
+ VerifyNameAndConnections(layer, name);
+ }
+ };
+
+ const std::string layerName("quantize");
+ const armnn::TensorInfo info({ 1, 2, 2, 3 }, armnn::DataType::Float32);
+
+ armnn::INetworkPtr network = armnn::INetwork::Create();
+ armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
+ armnn::IConnectableLayer* const quantizeLayer = network->AddQuantizeLayer(layerName.c_str());
+ armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
+
+ inputLayer->GetOutputSlot(0).Connect(quantizeLayer->GetInputSlot(0));
+ quantizeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
+
+ inputLayer->GetOutputSlot(0).SetTensorInfo(info);
+ quantizeLayer->GetOutputSlot(0).SetTensorInfo(info);
+
+ armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
+ BOOST_CHECK(deserializedNetwork);
+
+ QuantizeLayerVerifier verifier(layerName, {info}, {info});
+ deserializedNetwork->Accept(verifier);
+}
BOOST_AUTO_TEST_CASE(SerializeReshape)
{
class ReshapeLayerVerifier : public LayerVerifierBase