From 7205fed8ae4d6f85b350f3612cc43ee7e25176a7 Mon Sep 17 00:00:00 2001
From: Derek Lamberti
Date: Wed, 6 Feb 2019 16:20:46 +0000
Subject: IVGCVSW-2654 Test quantization of addition, input and output

Change-Id: I6c61df213278e6ba65807932548bb2eaa159deba
Signed-off-by: Derek Lamberti
---
 CMakeLists.txt                   |   1 +
 src/armnn/LayerVisitorBase.hpp   | 112 +++++++++++++++++++++------------------
 src/armnn/QuantizerVisitor.cpp   |  14 ++---
 src/armnn/QuantizerVisitor.hpp   |   6 +--
 src/armnn/StaticRangeVisitor.hpp |   2 +-
 src/armnn/test/QuantizerTest.cpp |  96 +++++++++++++++++++++++++++++++++
 6 files changed, 169 insertions(+), 62 deletions(-)
 create mode 100644 src/armnn/test/QuantizerTest.cpp

diff --git a/CMakeLists.txt b/CMakeLists.txt
index edf8ee6ddc..9eb184f4d1 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -422,6 +422,7 @@ if(BUILD_UNIT_TESTS)
         src/armnn/test/OptionalTest.cpp
         src/armnn/test/ProfilerTests.cpp
         src/armnn/test/ProfilingEventTest.cpp
+        src/armnn/test/QuantizerTest.cpp
         src/armnn/test/RuntimeTests.cpp
         src/armnn/test/RuntimeTests.hpp
         src/armnn/test/SubGraphTests.cpp
diff --git a/src/armnn/LayerVisitorBase.hpp b/src/armnn/LayerVisitorBase.hpp
index 037a5a75ac..3b6a2ff578 100644
--- a/src/armnn/LayerVisitorBase.hpp
+++ b/src/armnn/LayerVisitorBase.hpp
@@ -10,7 +10,18 @@
 namespace armnn
 {
 
+struct VisitorThrowingPolicy
+{
+    static void Apply() { throw UnimplementedException(); }
+};
+
+struct VisitorNoThrowPolicy
+{
+    static void Apply() {}
+};
+
 // Visitor base class with empty implementations.
+template<typename DefaultPolicy>
 class LayerVisitorBase : public ILayerVisitor
 {
 protected:
@@ -20,82 +31,82 @@ protected:
 public:
 
     virtual void VisitInputLayer(const IConnectableLayer*,
                                  LayerBindingId,
-                                 const char*) {}
+                                 const char*) { DefaultPolicy::Apply(); }
 
     virtual void VisitConvolution2dLayer(const IConnectableLayer*,
                                          const Convolution2dDescriptor&,
                                          const ConstTensor&,
-                                         const char*) {}
+                                         const char*) { DefaultPolicy::Apply(); }
 
     virtual void VisitConvolution2dLayer(const IConnectableLayer*,
                                          const Convolution2dDescriptor&,
                                          const ConstTensor&,
                                          const ConstTensor&,
-                                         const char*) {}
+                                         const char*) { DefaultPolicy::Apply(); }
 
     virtual void VisitDepthwiseConvolution2dLayer(const IConnectableLayer*,
                                                   const DepthwiseConvolution2dDescriptor&,
                                                   const ConstTensor& ,
-                                                  const char*) {}
+                                                  const char*) { DefaultPolicy::Apply(); }
 
     virtual void VisitDepthwiseConvolution2dLayer(const IConnectableLayer*,
                                                   const DepthwiseConvolution2dDescriptor&,
                                                   const ConstTensor&,
                                                   const ConstTensor&,
-                                                  const char*) {}
+                                                  const char*) { DefaultPolicy::Apply(); }
 
     virtual void VisitDetectionPostProcessLayer(const IConnectableLayer*,
                                                 const DetectionPostProcessDescriptor&,
-                                                const char*) {}
+                                                const char*) { DefaultPolicy::Apply(); }
 
     virtual void VisitFullyConnectedLayer(const IConnectableLayer*,
                                           const FullyConnectedDescriptor&,
                                           const ConstTensor&,
-                                          const char*) {}
+                                          const char*) { DefaultPolicy::Apply(); }
 
     virtual void VisitFullyConnectedLayer(const IConnectableLayer*,
                                           const FullyConnectedDescriptor&,
                                           const ConstTensor&,
                                           const ConstTensor&,
-                                          const char*) {}
+                                          const char*) { DefaultPolicy::Apply(); }
 
     virtual void VisitPermuteLayer(const IConnectableLayer*,
                                    const PermuteDescriptor&,
-                                   const char*) {}
+                                   const char*) { DefaultPolicy::Apply(); }
 
     virtual void VisitBatchToSpaceNdLayer(const IConnectableLayer*,
                                           const BatchToSpaceNdDescriptor&,
-                                          const char*) {}
+                                          const char*) { DefaultPolicy::Apply(); }
 
     virtual void VisitPooling2dLayer(const IConnectableLayer*,
                                      const Pooling2dDescriptor&,
-                                     const char*) {}
+                                     const char*) { DefaultPolicy::Apply(); }
 
     virtual void VisitActivationLayer(const IConnectableLayer*,
                                       const ActivationDescriptor&,
-                                      const char*) {}
+                                      const char*) { DefaultPolicy::Apply(); }
 
     virtual void VisitNormalizationLayer(const IConnectableLayer*,
                                          const NormalizationDescriptor&,
-                                         const char*) {}
+                                         const char*) { DefaultPolicy::Apply(); }
 
     virtual void VisitSoftmaxLayer(const IConnectableLayer*,
                                    const SoftmaxDescriptor&,
-                                   const char*) {}
+                                   const char*) { DefaultPolicy::Apply(); }
 
     virtual void VisitSplitterLayer(const IConnectableLayer*,
                                     const ViewsDescriptor&,
-                                    const char*) {}
+                                    const char*) { DefaultPolicy::Apply(); }
 
     virtual void VisitMergerLayer(const IConnectableLayer*,
                                   const OriginsDescriptor&,
-                                  const char*) {}
+                                  const char*) { DefaultPolicy::Apply(); }
 
     virtual void VisitAdditionLayer(const IConnectableLayer*,
-                                    const char*) {}
+                                    const char*) { DefaultPolicy::Apply(); }
 
     virtual void VisitMultiplicationLayer(const IConnectableLayer*,
-                                          const char*) {}
+                                          const char*) { DefaultPolicy::Apply(); }
 
     virtual void VisitBatchNormalizationLayer(const IConnectableLayer*,
                                               const BatchNormalizationDescriptor&,
@@ -103,76 +114,75 @@ public:
                                               const ConstTensor&,
                                               const ConstTensor&,
                                               const ConstTensor&,
-                                              const char*) {}
+                                              const char*) { DefaultPolicy::Apply(); }
 
     virtual void VisitResizeBilinearLayer(const IConnectableLayer*,
                                           const ResizeBilinearDescriptor&,
-                                          const char*) {}
+                                          const char*) { DefaultPolicy::Apply(); }
 
     virtual void VisitL2NormalizationLayer(const IConnectableLayer*,
                                            const L2NormalizationDescriptor&,
-                                           const char*) {}
+                                           const char*) { DefaultPolicy::Apply(); }
 
     virtual void VisitConstantLayer(const IConnectableLayer*,
                                     const ConstTensor&,
-                                    const char*) {}
+                                    const char*) { DefaultPolicy::Apply(); }
 
     virtual void VisitReshapeLayer(const IConnectableLayer*,
                                    const ReshapeDescriptor&,
-                                   const char*) {}
+                                   const char*) { DefaultPolicy::Apply(); }
 
     virtual void VisitSpaceToBatchNdLayer(const IConnectableLayer*,
                                           const SpaceToBatchNdDescriptor&,
-                                          const char*) {}
+                                          const char*) { DefaultPolicy::Apply(); }
 
     virtual void VisitFloorLayer(const IConnectableLayer*,
-                                 const char*) {}
+                                 const char*) { DefaultPolicy::Apply(); }
 
     virtual void VisitOutputLayer(const IConnectableLayer*,
                                   LayerBindingId id,
-                                  const char*) {}
-
+                                  const char*) { DefaultPolicy::Apply(); }
+
     virtual void VisitLstmLayer(const IConnectableLayer*,
                                 const LstmDescriptor&,
                                 const LstmInputParams&,
-                                const char*) {}
-
+                                const char*) { DefaultPolicy::Apply(); }
+
     virtual void VisitDivisionLayer(const IConnectableLayer*,
-                                    const char*) {}
-
+                                    const char*) { DefaultPolicy::Apply(); }
+
     virtual void VisitSubtractionLayer(const IConnectableLayer*,
-                                       const char*) {}
-
+                                       const char*) { DefaultPolicy::Apply(); }
+
     virtual void VisitMaximumLayer(const IConnectableLayer*,
-                                   const char*) {}
-
+                                   const char*) { DefaultPolicy::Apply(); }
+
     virtual void VisitMeanLayer(const IConnectableLayer*,
                                 const MeanDescriptor&,
-                                const char*) {}
-
+                                const char*) { DefaultPolicy::Apply(); }
+
     virtual void VisitPadLayer(const IConnectableLayer*,
                                const PadDescriptor&,
-                               const char*) {}
-
+                               const char*) { DefaultPolicy::Apply(); }
+
     virtual void VisitStridedSliceLayer(const IConnectableLayer*,
                                         const StridedSliceDescriptor&,
-                                        const char*) {}
-
+                                        const char*) { DefaultPolicy::Apply(); }
+
     virtual void VisitMinimumLayer(const IConnectableLayer*,
-                                   const char*) {}
-
+                                   const char*) { DefaultPolicy::Apply(); }
+
     virtual void VisitGreaterLayer(const IConnectableLayer*,
-                                   const char*) {}
-
+                                   const char*) { DefaultPolicy::Apply(); }
+
     virtual void VisitEqualLayer(const IConnectableLayer*,
-                                 const char*) {}
-
+                                 const char*) { DefaultPolicy::Apply(); }
+
     virtual void VisitRsqrtLayer(const IConnectableLayer*,
-                                 const char*) {}
-
+                                 const char*) { DefaultPolicy::Apply(); }
+
     virtual void VisitGatherLayer(const IConnectableLayer*,
-                                  const char*) {}
-
+                                  const char*) { DefaultPolicy::Apply(); }
 };
 
 } //namespace armnn
diff --git a/src/armnn/QuantizerVisitor.cpp b/src/armnn/QuantizerVisitor.cpp
index 7608d0a440..fd08b2d2e5 100644
--- a/src/armnn/QuantizerVisitor.cpp
+++ b/src/armnn/QuantizerVisitor.cpp
@@ -45,8 +45,6 @@ QuantizerVisitor::QuantizerVisitor(armnn::StaticRangeVisitor* ranges)
 void QuantizerVisitor::SetQuantizedInputConnections(const IConnectableLayer *srcLayer,
                                                     IConnectableLayer *quantizedLayer)
 {
-    m_OldToNewGuidMap[srcLayer->GetGuid()] = quantizedLayer->GetGuid();
-
     for (unsigned int i=0; i < srcLayer->GetNumInputSlots(); i++)
     {
         const IInputSlot& srcInputSlot = srcLayer->GetInputSlot(i);
@@ -74,6 +72,7 @@ void QuantizerVisitor::SetQuantizedInputConnections(const IConnectableLayer *src
             info.SetDataType(DataType::QuantisedAsymm8);
             info.SetQuantizationOffset(qParams.first);
             info.SetQuantizationScale(qParams.second);
+            newOutputSlot.SetTensorInfo(info);
         }
         else
         {
@@ -83,28 +82,29 @@ void QuantizerVisitor::SetQuantizedInputConnections(const IConnectableLayer *src
     }
 }
 
-void QuantizerVisitor::RecordLayer(IConnectableLayer* layer)
+void QuantizerVisitor::RecordLayer(const IConnectableLayer* srcLayer, IConnectableLayer* quantizedLayer)
 {
-    m_GuidToLayerMap[layer->GetGuid()] = layer;
+    m_OldToNewGuidMap[srcLayer->GetGuid()] = quantizedLayer->GetGuid();
+    m_GuidToLayerMap[quantizedLayer->GetGuid()] = quantizedLayer;
 }
 
 void QuantizerVisitor::VisitAdditionLayer(const IConnectableLayer *layer, const char *name)
 {
     IConnectableLayer* newLayer = m_QuantizedNetwork->AddAdditionLayer(name);
-    RecordLayer(newLayer);
+    RecordLayer(layer, newLayer);
     SetQuantizedInputConnections(layer, newLayer);
 }
 
 void QuantizerVisitor::VisitInputLayer(const IConnectableLayer *layer, LayerBindingId id, const char *name)
 {
     IConnectableLayer* newLayer = m_QuantizedNetwork->AddInputLayer(id, name);
-    RecordLayer(newLayer);
+    RecordLayer(layer, newLayer);
 }
 
 void QuantizerVisitor::VisitOutputLayer(const IConnectableLayer *layer, LayerBindingId id, const char *name)
 {
     IConnectableLayer* newLayer = m_QuantizedNetwork->AddOutputLayer(id, name);
-    RecordLayer(newLayer);
+    RecordLayer(layer, newLayer);
     SetQuantizedInputConnections(layer, newLayer);
 }
 
diff --git a/src/armnn/QuantizerVisitor.hpp b/src/armnn/QuantizerVisitor.hpp
index bf017d7205..5ff457ec33 100644
--- a/src/armnn/QuantizerVisitor.hpp
+++ b/src/armnn/QuantizerVisitor.hpp
@@ -18,7 +18,7 @@ namespace armnn
 class StaticRangeVisitor;
 
 /// Visitor object for quantizing layers in a network
-class QuantizerVisitor : public LayerVisitorBase
+class QuantizerVisitor : public LayerVisitorBase<VisitorThrowingPolicy>
 {
 public:
     QuantizerVisitor(StaticRangeVisitor* ranges);
@@ -36,8 +36,8 @@ private:
     /// Connects the layer to preceeding layers and sets the quantization parameters based on recorded ranges
     void SetQuantizedInputConnections(const IConnectableLayer *srcLayer,
                                       IConnectableLayer *quantizedLayer);
 
-    /// Record the guid so we can easily find it later
-    void RecordLayer(IConnectableLayer* layer);
+    /// Record the guids so we can easily find the layers later
+    void RecordLayer(const IConnectableLayer* srcLayer, IConnectableLayer* qLayer);
 
     StaticRangeVisitor* m_Ranges; ///< Previously recorded min/max ranges per intermediate tensor
diff --git a/src/armnn/StaticRangeVisitor.hpp b/src/armnn/StaticRangeVisitor.hpp
index 38f0088523..af59dace9e 100644
--- a/src/armnn/StaticRangeVisitor.hpp
+++ b/src/armnn/StaticRangeVisitor.hpp
@@ -16,7 +16,7 @@ namespace armnn
 {
 
 /// Visitor class to establish min/max ranges based on the type of the layer
-class StaticRangeVisitor : public LayerVisitorBase
+class StaticRangeVisitor : public LayerVisitorBase<VisitorNoThrowPolicy>
 {
 public:
     StaticRangeVisitor() = default;
diff --git a/src/armnn/test/QuantizerTest.cpp b/src/armnn/test/QuantizerTest.cpp
new file mode 100644
index 0000000000..56b1497967
--- /dev/null
+++ b/src/armnn/test/QuantizerTest.cpp
@@ -0,0 +1,96 @@
+//
+// Copyright © 2017 Arm Ltd. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+
+#include <armnn/INetwork.hpp>
+#include <armnn/INetworkQuantizer.hpp>
+#include <armnn/Tensor.hpp>
+#include <armnn/Types.hpp>
+
+#include "../LayerVisitorBase.hpp"
+#include "../Network.hpp"
+#include "../Graph.hpp"
+
+#include <boost/test/unit_test.hpp>
+
+namespace armnn
+{
+BOOST_AUTO_TEST_SUITE(Quantizer)
+
+void VisitLayersTopologically(const INetwork* inputNetwork, ILayerVisitor& visitor)
+{
+    auto network = boost::polymorphic_downcast<const Network*>(inputNetwork);
+    auto graph = network->GetGraph().TopologicalSort();
+
+    for (auto layer : graph)
+    {
+        layer->Accept(visitor);
+    }
+}
+
+BOOST_AUTO_TEST_CASE(QuantizeAddition)
+{
+    class TestQuantization : public LayerVisitorBase<VisitorThrowingPolicy>
+    {
+    public:
+        virtual void VisitAdditionLayer(const IConnectableLayer* layer,
+                                        const char* name = nullptr)
+        {
+            TensorInfo info = layer->GetOutputSlot(0).GetTensorInfo();
+
+            BOOST_TEST((info.GetDataType() == DataType::QuantisedAsymm8));
+
+            BOOST_TEST((info.GetQuantizationOffset() == 128));
+
+            // Based off current static value [-20.0f, 20.0f]
+            BOOST_CHECK_CLOSE(info.GetQuantizationScale(), 40.0f/255.0f, 0.000001f);
+        }
+
+        virtual void VisitInputLayer(const IConnectableLayer* layer,
+                                     LayerBindingId id,
+                                     const char* name = nullptr)
+        {
+            TensorInfo info = layer->GetOutputSlot(0).GetTensorInfo();
+
+            BOOST_TEST((info.GetDataType() == DataType::QuantisedAsymm8));
+
+            BOOST_TEST((info.GetQuantizationOffset() == 128));
+
+            // Based off current default [-15.0f, 15.0f]
+            BOOST_CHECK_CLOSE(info.GetQuantizationScale(), 30.0f/255.0f, 0.000001f);
+        }
+
+        virtual void VisitOutputLayer(const IConnectableLayer* layer,
+                                      LayerBindingId id,
+                                      const char* name = nullptr)
+        {}
+    };
+
+    auto network = INetwork::Create();
+
+    // Add the layers
+    IConnectableLayer* input0 = network->AddInputLayer(0);
+    IConnectableLayer* input1 = network->AddInputLayer(1);
+    IConnectableLayer* addition = network->AddAdditionLayer();
+    IConnectableLayer* output = network->AddOutputLayer(2);
+
+    // Establish connections
+    input0->GetOutputSlot(0).Connect(addition->GetInputSlot(0));
+    input1->GetOutputSlot(0).Connect(addition->GetInputSlot(1));
+    addition->GetOutputSlot(0).Connect(output->GetInputSlot(0));
+
+    // Set TensorInfo
+    TensorShape shape{1U};
+    TensorInfo info(shape, DataType::Float32);
+    input0->GetOutputSlot(0).SetTensorInfo(info);
+    input1->GetOutputSlot(0).SetTensorInfo(info);
+    addition->GetOutputSlot(0).SetTensorInfo(info);
+
+    auto quantizedNetwork = INetworkQuantizer::Create(network.get())->ExportNetwork();
+    TestQuantization validator;
+    VisitLayersTopologically(quantizedNetwork.get(), validator);
+}
+
+BOOST_AUTO_TEST_SUITE_END()
+} //namespace armnn
-- 
cgit v1.2.1
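
Note on the expected values in QuantizeAddition: the offset 128 and the scales 30/255 and 40/255 follow from mapping the float ranges mentioned in the test comments ([-15.0f, 15.0f] for the inputs, [-20.0f, 20.0f] for the addition output) onto an asymmetric 8-bit grid. The stand-alone snippet below is a minimal sketch of that arithmetic for reference only; it is not part of the patch, and the helper name ComputeQAsymm8Params is made up for this illustration rather than taken from the ArmNN API.

#include <cmath>
#include <iostream>
#include <utility>

// Asymmetric 8-bit quantization of a float range [min, max] onto [0, 255].
// Returns {offset, scale}, the pair the test checks via
// GetQuantizationOffset() and GetQuantizationScale().
std::pair<int, float> ComputeQAsymm8Params(float min, float max)
{
    const float scale = (max - min) / 255.0f;  // size of one quantization step
    // Zero-point: where 0.0f lands on the 0..255 grid (algebraically -min / scale),
    // computed in double to keep 127.5 exact before rounding.
    const int offset = static_cast<int>(std::round(255.0 * (0.0 - min) / (max - min)));
    return { offset, scale };
}

int main()
{
    const auto input    = ComputeQAsymm8Params(-15.0f, 15.0f); // offset 128, scale 30/255
    const auto addition = ComputeQAsymm8Params(-20.0f, 20.0f); // offset 128, scale 40/255

    std::cout << "input:    offset=" << input.first    << " scale=" << input.second    << "\n";
    std::cout << "addition: offset=" << addition.first << " scale=" << addition.second << "\n";
    return 0;
}

Both ranges are symmetric around zero, which is why the zero-point rounds to 128 (from 127.5) in both cases even though the scales differ.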