author    Aron Virginas-Tar <Aron.Virginas-Tar@arm.com>    2019-09-20 16:38:01 +0100
committer Francis Murtagh <francis.murtagh@arm.com>        2019-09-23 09:35:31 +0000
commit    a96479a46c87099d1a55a75233286e61a5ae02d5 (patch)
tree      3af6d47e9fc728c2d510e148e39e0385ee8a19f4 /src/armnn/test/QuantizerTest.cpp
parent    fcb8ef6b36873d06ddae7553aad28e726aa5be33 (diff)
download  armnn-a96479a46c87099d1a55a75233286e61a5ae02d5.tar.gz
IVGCVSW-3887 Add Quantizer support for DepthToSpace
Signed-off-by: Aron Virginas-Tar <Aron.Virginas-Tar@arm.com>
Change-Id: I144c04f920ba3329886367f5e73fe758505ab96e
Diffstat (limited to 'src/armnn/test/QuantizerTest.cpp')
-rw-r--r--  src/armnn/test/QuantizerTest.cpp  57
1 file changed, 57 insertions(+), 0 deletions(-)
diff --git a/src/armnn/test/QuantizerTest.cpp b/src/armnn/test/QuantizerTest.cpp
index a569c24aaf..6a217f3037 100644
--- a/src/armnn/test/QuantizerTest.cpp
+++ b/src/armnn/test/QuantizerTest.cpp
@@ -638,6 +638,63 @@ BOOST_AUTO_TEST_CASE(QuantizeBatchNorm)
VisitLayersTopologically(quantizedNetworkQSymm16.get(), validatorQSymm16);
}
+BOOST_AUTO_TEST_CASE(QuantizeDepthToSpace)
+{
+ class TestDepthToSpaceQuantization : public TestQuantization
+ {
+ public:
+ TestDepthToSpaceQuantization(const TensorShape& inputShape, const TensorShape& outputShape)
+ : TestQuantization(inputShape, outputShape) {}
+
+ TestDepthToSpaceQuantization(const QuantizerOptions& options,
+ const TensorShape& inputShape,
+ const TensorShape& outputShape)
+ : TestQuantization(options, inputShape, outputShape) {}
+
+ virtual void VisitDepthToSpaceLayer(const IConnectableLayer* layer,
+ const DepthToSpaceDescriptor& desc,
+ const char* name = nullptr)
+ {
+ const TensorInfo& info = layer->GetOutputSlot(0).GetTensorInfo();
+
+ const OffsetScalePair qAsymm8Params{ 30.0f / g_Asymm8QuantizationBase, 128 };
+ const OffsetScalePair qSymm16Params{ 15.0f / g_Symm16QuantizationBase, 0 };
+
+ TestQuantizationParams(info, qAsymm8Params, qSymm16Params);
+ }
+ };
+
+ const TensorShape inputShape { 1, 2, 2, 4 };
+ const TensorShape outputShape{ 1, 4, 4, 1 };
+
+ const TensorInfo inputInfo (inputShape, DataType::Float32);
+ const TensorInfo outputInfo(outputShape, DataType::Float32);
+
+ INetworkPtr network = INetwork::Create();
+ const DepthToSpaceDescriptor descriptor(2, armnn::DataLayout::NHWC);
+
+ IConnectableLayer* inputLayer = network->AddInputLayer(0);
+ IConnectableLayer* depthToSpaceLayer = network->AddDepthToSpaceLayer(descriptor);
+ IConnectableLayer* outputLayer = network->AddOutputLayer(0);
+
+ inputLayer->GetOutputSlot(0).Connect(depthToSpaceLayer->GetInputSlot(0));
+ depthToSpaceLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
+
+ inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
+ depthToSpaceLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
+
+ // test QAsymm8 quantization
+ INetworkPtr quantizedNetworkQAsymm8 = INetworkQuantizer::Create(network.get())->ExportNetwork();
+ TestDepthToSpaceQuantization validatorQAsymm8(inputShape, outputShape);
+ VisitLayersTopologically(quantizedNetworkQAsymm8.get(), validatorQAsymm8);
+
+ // test QSymm16 quantization
+ const QuantizerOptions options(DataType::QuantisedSymm16);
+ INetworkPtr quantizedNetworkQSymm16 = INetworkQuantizer::Create(network.get(), options)->ExportNetwork();
+ TestDepthToSpaceQuantization validatorQSymm16(options, inputShape, outputShape);
+ VisitLayersTopologically(quantizedNetworkQSymm16.get(), validatorQSymm16);
+}
+
BOOST_AUTO_TEST_CASE(OverrideInputRangeEmptyNetwork)
{
RangeTracker ranges;