author     Francis Murtagh <francis.murtagh@arm.com>  2019-02-15 16:56:27 +0000
committer  Francis Murtagh <francis.murtagh@arm.com>  2019-02-15 16:56:27 +0000
commit     aa80d8a9f334254188aa244d3a438846f3996177 (patch)
tree       5234de6e3990fdf1ca8cca5bb4e8000604ded3dc
parent     8675b68e1ed73c56cd00c1eb3a9c9921a1898309 (diff)
download   armnn-aa80d8a9f334254188aa244d3a438846f3996177.tar.gz
IVGCVSW-2618 Support static quantization of Splitter
Change-Id: I8e44866336dcd5a9694309cf9bb954a4991e61fd
Signed-off-by: Francis Murtagh <francis.murtagh@arm.com>
-rw-r--r--  src/armnn/QuantizerVisitor.cpp    9
-rw-r--r--  src/armnn/QuantizerVisitor.hpp    4
-rw-r--r--  src/armnn/StaticRangeVisitor.cpp  8
-rw-r--r--  src/armnn/StaticRangeVisitor.hpp  4
-rw-r--r--  src/armnn/test/QuantizerTest.cpp  41
5 files changed, 64 insertions(+), 2 deletions(-)
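
Note (editorial, not part of the patch): Arm NN's static quantization is a two-pass process. StaticRangeVisitor walks the float32 graph and records a [min, max] range for each output slot, and QuantizerVisitor then rebuilds the network with QuantisedAsymm8 tensors derived from those ranges. This change adds the Splitter layer to both passes and covers it with a unit test. The standalone sketch below only illustrates the usual asymmetric 8-bit mapping that is consistent with the values the new test expects (offset 64 and scale 20/255 for the LeakyReLu range [-5, 15]); the formula is stated here as an assumption, not quoted from the armnn sources.

// Standalone sketch: standard asymmetric 8-bit quantization of a [min, max] range.
// Shown only to explain the expected values in the new QuantizeSplitter test; not armnn code.
#include <cmath>
#include <cstdio>

int main()
{
    const float rangeMin = -5.0f;    // range the test attributes to the parent LeakyReLu
    const float rangeMax = 15.0f;
    const float quantizationBase = 255.0f;   // 8-bit; presumably what g_QuantizationBase holds in the test
    const float scale = (rangeMax - rangeMin) / quantizationBase;        // 20 / 255, roughly 0.0784
    const int offset = static_cast<int>(std::round(-rangeMin / scale));  // roughly 64
    std::printf("scale = %f, offset = %d\n", scale, offset);
    return 0;
}
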
diff --git a/src/armnn/QuantizerVisitor.cpp b/src/armnn/QuantizerVisitor.cpp
index 95b593971d..2ca164bf36 100644
--- a/src/armnn/QuantizerVisitor.cpp
+++ b/src/armnn/QuantizerVisitor.cpp
@@ -262,6 +262,15 @@ void QuantizerVisitor::VisitConstantLayer(const IConnectableLayer* layer,
     RecordLayer(layer, newLayer);
 }
 
+void QuantizerVisitor::VisitSplitterLayer(const IConnectableLayer* layer,
+                                          const SplitterDescriptor& splitterDescriptor,
+                                          const char* name)
+{
+    IConnectableLayer* newLayer = m_QuantizedNetwork->AddSplitterLayer(splitterDescriptor, name);
+    RecordLayer(layer, newLayer);
+    SetQuantizedInputConnections(layer, newLayer);
+}
+
 void QuantizerVisitor::VisitReshapeLayer(const IConnectableLayer* layer,
                                          const ReshapeDescriptor& reshapeDescriptor,
                                          const char* name)
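
The override added above follows the same pattern as the other QuantizerVisitor methods: add a mirror layer to the quantized network, record the mapping from the original layer, and re-wire the inputs to their already-quantized sources. The descriptor is passed through untouched; SplitterDescriptor is an alias of ViewsDescriptor. As a rough illustration of what such a descriptor carries (the view layout below is invented for the example and is not taken from the patch), a two-way split of a 1-D tensor with four elements could be described as follows.

// Illustrative only: filling in a splitter descriptor for a two-way split
// of a 1-D tensor of four elements. SplitterDescriptor aliases ViewsDescriptor.
#include <armnn/Descriptors.hpp>

armnn::ViewsDescriptor MakeTwoWaySplit()
{
    armnn::ViewsDescriptor desc(2, 1);   // 2 views over a 1-dimensional tensor

    desc.SetViewOriginCoord(0, 0, 0);    // first view starts at element 0
    desc.SetViewSize(0, 0, 2);           // ...and covers two elements

    desc.SetViewOriginCoord(1, 0, 2);    // second view starts at element 2
    desc.SetViewSize(1, 0, 2);           // ...and covers the remaining two

    return desc;
}
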
diff --git a/src/armnn/QuantizerVisitor.hpp b/src/armnn/QuantizerVisitor.hpp
index aaa888e5a0..79c44f2a60 100644
--- a/src/armnn/QuantizerVisitor.hpp
+++ b/src/armnn/QuantizerVisitor.hpp
@@ -76,6 +76,10 @@ public:
                                    const SpaceToBatchNdDescriptor& spaceToBatchNdDescriptor,
                                    const char* name = nullptr) override;
 
+    void VisitSplitterLayer(const IConnectableLayer* layer,
+                            const SplitterDescriptor& splitterDescriptor,
+                            const char* name = nullptr) override;
+
     void VisitPooling2dLayer(const IConnectableLayer* layer,
                              const Pooling2dDescriptor& pooling2dDescriptor,
                              const char* name = nullptr) override;
diff --git a/src/armnn/StaticRangeVisitor.cpp b/src/armnn/StaticRangeVisitor.cpp
index 4b715bdfb6..ad2de63d2e 100644
--- a/src/armnn/StaticRangeVisitor.cpp
+++ b/src/armnn/StaticRangeVisitor.cpp
@@ -207,4 +207,12 @@ void StaticRangeVisitor::VisitReshapeLayer(const IConnectableLayer* layer,
     ForwardParentParameters(layer);
 }
 
+void StaticRangeVisitor::VisitSplitterLayer(const IConnectableLayer* layer,
+                                            const SplitterDescriptor& splitterDescriptor,
+                                            const char* name)
+{
+    boost::ignore_unused(splitterDescriptor);
+    ForwardParentParameters(layer);
+}
+
 } //namespace armnn
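
The rationale for the one-liner above: a splitter only routes slices of its input to its outputs, so every output observes values drawn from the same range as the input, and no new range needs to be computed. The snippet below is a purely hypothetical illustration of that forwarding idea; RangeMap, ForwardRange, and the slot keys are invented names for this sketch, not members of StaticRangeVisitor.

// Hypothetical illustration: a value-preserving layer (such as Splitter) reuses
// its parent's recorded [min, max] range instead of deriving a new one.
#include <map>
#include <utility>

using Range    = std::pair<float, float>;        // {min, max}
using RangeMap = std::map<const void*, Range>;   // keyed by output slot, purely illustrative

void ForwardRange(RangeMap& ranges, const void* parentOutputSlot, const void* childOutputSlot)
{
    // The child's output inherits the parent's range unchanged.
    ranges[childOutputSlot] = ranges.at(parentOutputSlot);
}
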
diff --git a/src/armnn/StaticRangeVisitor.hpp b/src/armnn/StaticRangeVisitor.hpp
index 145db20fc3..a42ad37286 100644
--- a/src/armnn/StaticRangeVisitor.hpp
+++ b/src/armnn/StaticRangeVisitor.hpp
@@ -83,6 +83,10 @@ public:
                             const ReshapeDescriptor& reshapeDescriptor,
                             const char* name = nullptr) override;
 
+    void VisitSplitterLayer(const IConnectableLayer* layer,
+                            const SplitterDescriptor& splitterDescriptor,
+                            const char* name = nullptr) override;
+
 private:
     /// Set the range for an output slot on a layer
     void SetRange(const IConnectableLayer* layer, unsigned int outputIdx, float min, float max);
diff --git a/src/armnn/test/QuantizerTest.cpp b/src/armnn/test/QuantizerTest.cpp
index b73ac20457..1f6537d16a 100644
--- a/src/armnn/test/QuantizerTest.cpp
+++ b/src/armnn/test/QuantizerTest.cpp
@@ -791,7 +791,7 @@ IConnectableLayer* CreateStartOfLeakyReluNetwork(INetwork* network, const Tensor
     // Establish connections
     input0->GetOutputSlot(0).Connect(activation->GetInputSlot(0));
 
-    //Set TensorInfo
+    // Set TensorInfo
     input0->GetOutputSlot(0).SetTensorInfo(info);
     activation->GetOutputSlot(0).SetTensorInfo(info);
@@ -918,7 +918,7 @@ BOOST_AUTO_TEST_CASE(QuantizePooling2d)
     activation->GetOutputSlot(0).Connect(pooling2d->GetInputSlot(0));
     pooling2d->GetOutputSlot(0).Connect(output->GetInputSlot(0));
 
-    //Set TensorInfo
+    // Set TensorInfo
     input0->GetOutputSlot(0).SetTensorInfo(info);
     activation->GetOutputSlot(0).SetTensorInfo(info);
     pooling2d->GetOutputSlot(0).SetTensorInfo(info);
@@ -1083,5 +1083,42 @@ BOOST_AUTO_TEST_CASE(QuantizeReshape)
     VisitLayersTopologically(quantizedNetwork.get(), validator);
 }
 
+class TestSplitterQuantization : public TestLeakyReLuActivationQuantization
+{
+public:
+    virtual void VisitSplitterLayer(const IConnectableLayer* layer,
+                                    const SplitterDescriptor& desc,
+                                    const char* name = nullptr)
+    {
+        TensorInfo info = layer->GetOutputSlot(0).GetTensorInfo();
+
+        BOOST_TEST((info.GetDataType() == DataType::QuantisedAsymm8));
+
+        BOOST_TEST((info.GetQuantizationOffset() == 64));
+
+        // Based off parent LeakyReLu [-5.f, 15.f]
+        BOOST_CHECK_CLOSE(info.GetQuantizationScale(), 20.0f/g_QuantizationBase, g_TestTolerance);
+    }
+};
+
+BOOST_AUTO_TEST_CASE(QuantizeSplitter)
+{
+    auto network = INetwork::Create();
+
+    TensorShape shape{3U};
+    TensorInfo info(shape, DataType::Float32);
+
+    IConnectableLayer* activation = CreateStartOfLeakyReluNetwork(network.get(), info);
+
+    // Add the layer under test
+    ViewsDescriptor splitterDesc(2,4);
+    IConnectableLayer* splitter = network->AddSplitterLayer(splitterDesc);
+    CompleteLeakyReluNetwork(network.get(), activation, splitter, info);
+
+    auto quantizedNetwork = INetworkQuantizer::Create(network.get())->ExportNetwork();
+    TestSplitterQuantization validator;
+    VisitLayersTopologically(quantizedNetwork.get(), validator);
+}
+
 BOOST_AUTO_TEST_SUITE_END()
 } // namespace armnn
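
Editorial addendum, not part of the patch: the compressed sketch below mirrors the network the new QuantizeSplitter test builds (input, LeakyReLu activation, splitter, output) and runs it through INetworkQuantizer, but without the test helpers. The include paths, the activation parameters, and the decision to wire up only the splitter's first output are simplifications assumed here, not something the patch prescribes.

// Hedged sketch of the workflow exercised by the new QuantizeSplitter test.
#include <armnn/ArmNN.hpp>              // assumed umbrella header
#include <armnn/INetworkQuantizer.hpp>  // assumed location of the quantizer interface

int main()
{
    using namespace armnn;

    INetworkPtr network = INetwork::Create();
    TensorInfo info(TensorShape({3u}), DataType::Float32);

    IConnectableLayer* input = network->AddInputLayer(0);

    ActivationDescriptor activationDesc;
    activationDesc.m_Function = ActivationFunction::LeakyReLu;
    activationDesc.m_A = 0.01f;          // illustrative slope; the patch does not fix a value
    IConnectableLayer* activation = network->AddActivationLayer(activationDesc);

    ViewsDescriptor splitterDesc(2, 4);  // mirrors ViewsDescriptor(2,4) in the test
    IConnectableLayer* splitter = network->AddSplitterLayer(splitterDesc);
    IConnectableLayer* output = network->AddOutputLayer(0);

    input->GetOutputSlot(0).Connect(activation->GetInputSlot(0));
    activation->GetOutputSlot(0).Connect(splitter->GetInputSlot(0));
    splitter->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    input->GetOutputSlot(0).SetTensorInfo(info);
    activation->GetOutputSlot(0).SetTensorInfo(info);
    splitter->GetOutputSlot(0).SetTensorInfo(info);

    // Infer ranges statically, then export the network rebuilt with QuantisedAsymm8 tensors.
    INetworkPtr quantized = INetworkQuantizer::Create(network.get())->ExportNetwork();
    return quantized ? 0 : 1;
}
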