author    Aron Virginas-Tar <Aron.Virginas-Tar@arm.com>  2019-06-12 13:04:11 +0100
committer Aron Virginas-Tar <Aron.Virginas-Tar@arm.com>  2019-06-12 13:04:11 +0100
commit    1ea3fcffa6438acd9096fb0a48ad0b288c3d8cf3 (patch)
tree      b3c9de0833ab0e5b6e176524c80e04ba0135224a
parent    aa06714b9f49fe36716fd3c116aadd892fadd4ac (diff)
download  armnn-1ea3fcffa6438acd9096fb0a48ad0b288c3d8cf3.tar.gz
IVGCVSW-3261 Add Quantizer support for SpaceToDepth layer
Change-Id: Ic08e38fe10ca1abd79c6e4b1a83dc9a929686b56
Signed-off-by: Aron Virginas-Tar <Aron.Virginas-Tar@arm.com>
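
As a rough usage sketch (not part of this patch): with this change, a network containing a SpaceToDepth layer can be run through INetworkQuantizer like any other supported layer. The sketch below only assumes the public ArmNN API already exercised by the tests in this patch (INetwork::Create, AddSpaceToDepthLayer, INetworkQuantizer::Create/ExportNetwork); the shapes, descriptor values and the quantizer header path are illustrative, not prescribed by the change.

    #include <armnn/ArmNN.hpp>
    #include <armnn/INetworkQuantizer.hpp> // assumed public header for the quantizer

    using namespace armnn;

    INetworkPtr BuildAndQuantizeSpaceToDepth()
    {
        // Minimal float32 network: Input -> SpaceToDepth -> Output.
        INetworkPtr network = INetwork::Create();

        TensorInfo inputInfo({ 1, 2, 2, 4 }, DataType::Float32);   // NHWC, illustrative shape
        TensorInfo outputInfo({ 1, 1, 1, 16 }, DataType::Float32); // blockSize 2 folds 2x2 spatial into channels

        SpaceToDepthDescriptor descriptor;
        descriptor.m_BlockSize  = 2;
        descriptor.m_DataLayout = DataLayout::NHWC;

        IConnectableLayer* input        = network->AddInputLayer(0);
        IConnectableLayer* spaceToDepth = network->AddSpaceToDepthLayer(descriptor, "spaceToDepth");
        IConnectableLayer* output       = network->AddOutputLayer(0);

        input->GetOutputSlot(0).Connect(spaceToDepth->GetInputSlot(0));
        spaceToDepth->GetOutputSlot(0).Connect(output->GetInputSlot(0));

        input->GetOutputSlot(0).SetTensorInfo(inputInfo);
        spaceToDepth->GetOutputSlot(0).SetTensorInfo(outputInfo);

        // Quantize to QAsymm8 (the default), which now exercises VisitSpaceToDepthLayer.
        return INetworkQuantizer::Create(network.get())->ExportNetwork();
    }
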
-rw-r--r--  src/armnn/QuantizerVisitor.cpp   |  9
-rw-r--r--  src/armnn/QuantizerVisitor.hpp   |  4
-rw-r--r--  src/armnn/test/QuantizerTest.cpp | 51
3 files changed, 60 insertions(+), 4 deletions(-)
diff --git a/src/armnn/QuantizerVisitor.cpp b/src/armnn/QuantizerVisitor.cpp
index 7ba56757c2..feb9205199 100644
--- a/src/armnn/QuantizerVisitor.cpp
+++ b/src/armnn/QuantizerVisitor.cpp
@@ -385,6 +385,15 @@ void QuantizerVisitor::VisitSpaceToBatchNdLayer(const IConnectableLayer* layer,
SetQuantizedInputConnections(layer, newLayer);
}
+void QuantizerVisitor::VisitSpaceToDepthLayer(const IConnectableLayer* layer,
+ const SpaceToDepthDescriptor& spaceToDepthDescriptor,
+ const char* name)
+{
+ IConnectableLayer* newLayer = m_QuantizedNetwork->AddSpaceToDepthLayer(spaceToDepthDescriptor, name);
+ RecordLayer(layer, newLayer);
+ SetQuantizedInputConnections(layer, newLayer);
+}
+
void QuantizerVisitor::VisitSplitterLayer(const IConnectableLayer* layer,
const SplitterDescriptor& splitterDescriptor,
const char* name)
diff --git a/src/armnn/QuantizerVisitor.hpp b/src/armnn/QuantizerVisitor.hpp
index 6463350f2b..ab773d42eb 100644
--- a/src/armnn/QuantizerVisitor.hpp
+++ b/src/armnn/QuantizerVisitor.hpp
@@ -118,6 +118,10 @@ public:
const SpaceToBatchNdDescriptor& spaceToBatchNdDescriptor,
const char* name = nullptr) override;
+ void VisitSpaceToDepthLayer(const IConnectableLayer* layer,
+ const SpaceToDepthDescriptor& spaceToDepthDescriptor,
+ const char* name = nullptr) override;
+
void VisitSplitterLayer(const IConnectableLayer* layer,
const SplitterDescriptor& splitterDescriptor,
const char* name = nullptr) override;
diff --git a/src/armnn/test/QuantizerTest.cpp b/src/armnn/test/QuantizerTest.cpp
index 213018ab85..48602d31f7 100644
--- a/src/armnn/test/QuantizerTest.cpp
+++ b/src/armnn/test/QuantizerTest.cpp
@@ -1132,6 +1132,52 @@ BOOST_AUTO_TEST_CASE(QuantizeSpaceToBatch)
VisitLayersTopologically(quantizedNetworkQSymm16.get(), validatorQSymm16);
}
+BOOST_AUTO_TEST_CASE(QuantizeSpaceToDepth)
+{
+ class TestSpaceToDepthQuantization : public TestLeakyReLuActivationQuantization
+ {
+ public:
+ TestSpaceToDepthQuantization(const TensorShape& inputShape, const TensorShape& outputShape)
+ : TestLeakyReLuActivationQuantization(inputShape, outputShape)
+ {}
+
+ TestSpaceToDepthQuantization(const QuantizerOptions& options,
+ const TensorShape& inputShape,
+ const TensorShape& outputShape)
+ : TestLeakyReLuActivationQuantization(options, inputShape, outputShape)
+ {}
+
+ void VisitSpaceToDepthLayer(const IConnectableLayer* layer,
+ const SpaceToDepthDescriptor&,
+ const char* = nullptr) override
+ {
+ TensorInfo info = layer->GetOutputSlot(0).GetTensorInfo();
+ TestQuantizationParams(info,
+ { 30.0f / g_Asymm8QuantizationBase, 128 },
+ { 15.0f / g_Symm16QuantizationBase, 0 });
+ }
+ };
+
+ INetworkPtr network = INetwork::Create();
+
+ const TensorShape shape{ 1u };
+ TensorInfo info(shape, DataType::Float32);
+
+ IConnectableLayer* activation = CreateStartOfLeakyReluNetwork(network.get(), info);
+ IConnectableLayer* spaceToDepth = network->AddSpaceToDepthLayer(SpaceToDepthDescriptor());
+
+ CompleteLeakyReluNetwork(network.get(), activation, spaceToDepth, info);
+
+ INetworkPtr quantizedNetworkQAsymm8 = INetworkQuantizer::Create(network.get())->ExportNetwork();
+ TestSpaceToDepthQuantization validatorQAsymm8(shape, shape);
+ VisitLayersTopologically(quantizedNetworkQAsymm8.get(), validatorQAsymm8);
+
+ const QuantizerOptions options(DataType::QuantisedSymm16);
+ INetworkPtr quantizedNetworkQSymm16 = INetworkQuantizer::Create(network.get(), options)->ExportNetwork();
+ TestSpaceToDepthQuantization validatorQSymm16(options, shape, shape);
+ VisitLayersTopologically(quantizedNetworkQSymm16.get(), validatorQSymm16);
+}
+
BOOST_AUTO_TEST_CASE(QuantizePooling2d)
{
class TestPooling2dQuantization : public TestLeakyReLuActivationQuantization
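
For reference (not part of the patch): the parameter pairs asserted in QuantizeSpaceToDepth above follow from the output range established by the shared leaky ReLU test helpers. The check below is a minimal sketch assuming that range is [-15.0f, 15.0f] and that the usual asymmetric/symmetric formulas apply; 255 and 32767 match g_Asymm8QuantizationBase and g_Symm16QuantizationBase used in this test file.

    #include <algorithm>
    #include <cassert>
    #include <cmath>

    int main()
    {
        const float rangeMin = -15.0f; // assumed range propagated by the leaky ReLU helpers
        const float rangeMax =  15.0f;

        // QAsymm8: the scale spreads the full range over 255 steps; the offset maps rangeMin onto 0.
        const float asymm8Scale  = (rangeMax - rangeMin) / 255.0f;                        // 30 / 255
        const int   asymm8Offset = static_cast<int>(std::round(-rangeMin / asymm8Scale)); // 127.5 -> 128

        // QSymm16: symmetric about zero, so only the larger magnitude matters and the offset stays 0.
        const float symm16Scale  = std::max(std::fabs(rangeMin), std::fabs(rangeMax)) / 32767.0f; // 15 / 32767
        const int   symm16Offset = 0;

        assert(asymm8Offset == 128);
        assert(symm16Offset == 0);
        (void)asymm8Scale; (void)symm16Scale; (void)asymm8Offset; (void)symm16Offset;
        return 0;
    }
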
@@ -1556,10 +1602,7 @@ std::vector<uint8_t> SetupQuantize(float value)
armnn::TensorInfo inputInfo({ 1, 2, 2 }, armnn::DataType::Float32);
inputInfo.SetQuantizationScale(1.0f);
inputInfo.SetQuantizationOffset(1);
- std::vector<float> input({
- value, 0.0f,
- 0.0f, 1.0f
- });
+ std::vector<float> input({ value, 0.0f, 0.0f, 1.0f });
const std::vector<float> &inputRef = input;
auto output = QuantizedVector<uint8_t>(inputInfo.GetQuantizationScale(),