path: root/src/armnn/test/QuantizerTest.cpp
author    Derek Lamberti <derek.lamberti@arm.com>  2019-02-07 11:14:11 +0000
committer Derek Lamberti <derek.lamberti@arm.com>  2019-02-07 13:21:28 +0000
commit    857aa45407df9dbe99a11d03a4be2b20bd0110ae (patch)
tree      3e47a2aa1ac8787a00900eff0ba49246ef9a4bdc /src/armnn/test/QuantizerTest.cpp
parent    49dbe0e9f6747583cff29ada68d6670796d4216c (diff)
download  armnn-857aa45407df9dbe99a11d03a4be2b20bd0110ae.tar.gz
IVGCVSW-2609 Quantize BatchNormalizationLayer
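For reference, the quantization parameters asserted in the new test follow from a
simple asymmetric 8-bit mapping over a float range. A minimal sketch of that mapping
is shown below; the helper ComputeQAsymm8Params is illustrative only and is not part
of the ArmNN API.

    #include <cmath>
    #include <cstdint>

    // Illustrative helper: derive QAsymm8 scale and zero point from a float range.
    struct QAsymm8Params { float scale; int32_t offset; };

    QAsymm8Params ComputeQAsymm8Params(float min, float max)
    {
        float scale    = (max - min) / 255.0f;                           // spread the range over 0..255
        int32_t offset = static_cast<int32_t>(std::round(-min / scale)); // zero point of the mapping
        return { scale, offset };
    }

    // Output tensor, static range [-15.0f, 15.0f]: scale = 30/255, offset = round(15 * 255/30) = 128
    // Constant tensors, data range [-1.0f, 2.0f]:  scale = 3/255,  offset = round(1 * 255/3)   = 85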
Change-Id: I7b847112a0322ffc8b88a0708d8439bfb97cfe2c
Signed-off-by: Derek Lamberti <derek.lamberti@arm.com>
Diffstat (limited to 'src/armnn/test/QuantizerTest.cpp')
-rw-r--r--  src/armnn/test/QuantizerTest.cpp  83
1 file changed, 83 insertions(+), 0 deletions(-)
diff --git a/src/armnn/test/QuantizerTest.cpp b/src/armnn/test/QuantizerTest.cpp
index 56b1497967..fbafbd8f1e 100644
--- a/src/armnn/test/QuantizerTest.cpp
+++ b/src/armnn/test/QuantizerTest.cpp
@@ -92,5 +92,88 @@ BOOST_AUTO_TEST_CASE(QuantizeAddition)
VisitLayersTopologically(quantizedNetwork.get(), validator);
}
+BOOST_AUTO_TEST_CASE(QuantizeBatchNorm)
+{
+
+ class TestQuantization : public LayerVisitorBase<VisitorThrowingPolicy>
+ {
+ public:
+ virtual void VisitBatchNormalizationLayer(const IConnectableLayer* layer,
+ const BatchNormalizationDescriptor& desc,
+ const ConstTensor& mean,
+ const ConstTensor& variance,
+ const ConstTensor& beta,
+ const ConstTensor& gamma,
+ const char* name = nullptr)
+ {
+ TensorInfo info = layer->GetOutputSlot(0).GetTensorInfo();
+
+ BOOST_TEST((info.GetDataType() == DataType::QuantisedAsymm8));
+
+ BOOST_TEST((info.GetQuantizationOffset() == 128));
+
+ // Based on the current static output range [-15.0f, 15.0f]
+ BOOST_CHECK_CLOSE(info.GetQuantizationScale(), 30.0f/255.0f, 0.000001f);
+
+ // Test constants
+ BOOST_TEST((mean.GetInfo().GetDataType() == DataType::QuantisedAsymm8));
+ BOOST_TEST((variance.GetInfo().GetDataType() == DataType::QuantisedAsymm8));
+ BOOST_TEST((beta.GetInfo().GetDataType() == DataType::QuantisedAsymm8));
+ BOOST_TEST((gamma.GetInfo().GetDataType() == DataType::QuantisedAsymm8));
+
+ BOOST_CHECK_CLOSE(mean.GetInfo().GetQuantizationScale(), 3.0f/255.0f, 0.000001f);
+ BOOST_CHECK_CLOSE(variance.GetInfo().GetQuantizationScale(), 3.0f/255.0f, 0.000001f);
+ BOOST_CHECK_CLOSE(beta.GetInfo().GetQuantizationScale(), 3.0f/255.0f, 0.000001f);
+ BOOST_CHECK_CLOSE(gamma.GetInfo().GetQuantizationScale(), 3.0f/255.0f, 0.000001f);
+
+ BOOST_TEST((mean.GetInfo().GetQuantizationOffset() == 85)); // Zero point derived from the constant data range [-1.0f, 2.0f]
+ }
+
+ virtual void VisitInputLayer(const IConnectableLayer* layer,
+ LayerBindingId id,
+ const char* name = nullptr)
+ {}
+
+ virtual void VisitOutputLayer(const IConnectableLayer* layer,
+ LayerBindingId id,
+ const char* name = nullptr)
+ {}
+ };
+
+ auto network = INetwork::Create();
+
+ TensorShape shape{3U};
+ TensorInfo info(shape, DataType::Float32);
+
+ std::vector<float> meanData{-1.0f, 1.5f, 2.0f};
+ std::vector<float> varData{-1.0f, 1.5f, 2.0f};
+ std::vector<float> betaData{-1.0f, 1.5f, 2.0f};
+ std::vector<float> gammaData{-1.0f, 1.5f, 2.0f};
+
+ ConstTensor mean(info, meanData);
+ ConstTensor var(info, varData);
+ ConstTensor beta(info, betaData);
+ ConstTensor gamma(info, gammaData);
+
+ BatchNormalizationDescriptor desc;
+
+ // Add the layers
+ IConnectableLayer* input0 = network->AddInputLayer(0);
+ IConnectableLayer* batchNorm = network->AddBatchNormalizationLayer(desc, mean, var, beta, gamma);
+ IConnectableLayer* output = network->AddOutputLayer(1);
+
+ // Establish connections
+ input0->GetOutputSlot(0).Connect(batchNorm->GetInputSlot(0));
+ batchNorm->GetOutputSlot(0).Connect(output->GetInputSlot(0));
+
+ // Set TensorInfo
+ input0->GetOutputSlot(0).SetTensorInfo(info);
+ batchNorm->GetOutputSlot(0).SetTensorInfo(info);
+
+ auto quantizedNetwork = INetworkQuantizer::Create(network.get())->ExportNetwork();
+ TestQuantization validator;
+ VisitLayersTopologically(quantizedNetwork.get(), validator);
+}
+
BOOST_AUTO_TEST_SUITE_END()
} //namespace armnn