author     Jim Flynn <jim.flynn@arm.com>                  2019-02-14 09:56:36 +0000
committer  Matteo Martincigh <matteo.martincigh@arm.com>  2019-02-15 15:00:11 +0000
commit     e870bef8c93a3c711362b3530aa49c353f048d01 (patch)
tree       e2c63e9ef64b345c837ecfd656ecd42f14ac2221
parent     43e7864cf81cd55fb53f8b98b4d387da859eae0d (diff)
download   armnn-e870bef8c93a3c711362b3530aa49c353f048d01.tar.gz
IVGCVSW-2621 Add static quantization of Merger
Change-Id: I19f01698a6f9b361cd1737d76e6ec2020fab77a6
Signed-off-by: Jim Flynn <jim.flynn@arm.com>
-rw-r--r--  src/armnn/QuantizerVisitor.cpp    9
-rw-r--r--  src/armnn/QuantizerVisitor.hpp    4
-rw-r--r--  src/armnn/StaticRangeVisitor.cpp  17
-rw-r--r--  src/armnn/StaticRangeVisitor.hpp   4
-rw-r--r--  src/armnn/test/QuantizerTest.cpp  77
5 files changed, 110 insertions(+), 1 deletion(-)
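
For context: a Merger (concatenation) layer cannot widen the value range of its inputs, so the static range of its output is taken as the union of the input ranges (the smallest input minimum and the largest input maximum), which is what StaticRangeVisitor::VisitMergerLayer computes in the patch below. The asymmetric 8-bit parameters checked by the new QuantizeMerger test then follow from that range. Below is a minimal sketch of that derivation, assuming ComputeQAsymmParams behaves roughly like this; the helper name ComputeQAsymmParamsSketch and the exact rounding are illustrative, not necessarily Arm NN's implementation.

    #include <algorithm>
    #include <cmath>
    #include <utility>

    // Sketch: derive {offset, scale} for asymmetric quantization from a static
    // [min, max] range; the highest quantized value is 2^numBits - 1 (255 for 8 bits).
    std::pair<int, float> ComputeQAsymmParamsSketch(unsigned int numBits, float min, float max)
    {
        // Zero must be exactly representable, so extend the range to include it.
        // Assumes max > min after this clamping.
        min = std::min(min, 0.0f);
        max = std::max(max, 0.0f);

        const float qMax   = static_cast<float>((1u << numBits) - 1u);
        const float scale  = (max - min) / qMax;
        const int   offset = static_cast<int>(std::round(-min / scale));

        return { offset, scale };
    }

With the overridden ranges in the QuantizeMerger test (overall [-15.5f, 45.3f]), this sketch gives a scale of roughly 60.8f / 255.0f ≈ 0.238 and an offset of roughly 65, which should match what TestMergerVisitor expects via ComputeQAsymmParams(8, m_Min, m_Max), assuming the real helper rounds the same way.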
diff --git a/src/armnn/QuantizerVisitor.cpp b/src/armnn/QuantizerVisitor.cpp
index 8e3265fb75..88062068e8 100644
--- a/src/armnn/QuantizerVisitor.cpp
+++ b/src/armnn/QuantizerVisitor.cpp
@@ -233,6 +233,15 @@ void QuantizerVisitor::VisitPooling2dLayer(const IConnectableLayer* layer,
SetQuantizedInputConnections(layer, newLayer);
}
+void QuantizerVisitor::VisitMergerLayer(const IConnectableLayer* layer,
+ const OriginsDescriptor& mergerDescriptor,
+ const char* name)
+{
+ IConnectableLayer* newLayer = m_QuantizedNetwork->AddMergerLayer(mergerDescriptor, name);
+ RecordLayer(layer, newLayer);
+ SetQuantizedInputConnections(layer, newLayer);
+}
+
void QuantizerVisitor::VisitSoftmaxLayer(const IConnectableLayer* layer,
const SoftmaxDescriptor& softmaxDescriptor,
const char* name)
diff --git a/src/armnn/QuantizerVisitor.hpp b/src/armnn/QuantizerVisitor.hpp
index 1beaf5adde..ac90bfb1e5 100644
--- a/src/armnn/QuantizerVisitor.hpp
+++ b/src/armnn/QuantizerVisitor.hpp
@@ -84,6 +84,10 @@ public:
const ConstTensor& input,
const char* name = nullptr) override;
+ void VisitMergerLayer(const IConnectableLayer* layer,
+ const OriginsDescriptor& mergerDescriptor,
+ const char* name = nullptr) override;
+
/// Extract the quantized network
INetworkPtr RetrieveFinalNetwork() { return std::move(m_QuantizedNetwork); }
diff --git a/src/armnn/StaticRangeVisitor.cpp b/src/armnn/StaticRangeVisitor.cpp
index cc1255e56e..44b05ca6cb 100644
--- a/src/armnn/StaticRangeVisitor.cpp
+++ b/src/armnn/StaticRangeVisitor.cpp
@@ -177,7 +177,24 @@ void StaticRangeVisitor::VisitConstantLayer(const IConnectableLayer* layer,
min = std::min(min, inputValue);
max = std::max(max, inputValue);
}
+ SetRange(layer, 0, min, max);
+}
+void StaticRangeVisitor::VisitMergerLayer(const IConnectableLayer* layer,
+ const OriginsDescriptor& mergerDescriptor,
+ const char* name)
+{
+ float min = std::numeric_limits<float>::max();
+ float max = std::numeric_limits<float>::lowest();
+ for (unsigned int i = 0; i < layer->GetNumInputSlots(); ++i)
+ {
+ const IOutputSlot* outputSlot = layer->GetInputSlot(i).GetConnection();
+ LayerGuid layerId = outputSlot->GetOwningLayerGuid();
+ unsigned int slotIndex = outputSlot->CalculateIndexOnOwner();
+ RangeTracker::MinMaxRange range = m_RangeTracker.GetRange(layerId, slotIndex);
+ min = std::min(min, range.first);
+ max = std::max(max, range.second);
+ }
SetRange(layer, 0, min, max);
}
diff --git a/src/armnn/StaticRangeVisitor.hpp b/src/armnn/StaticRangeVisitor.hpp
index 2f80dcb45b..7576e96032 100644
--- a/src/armnn/StaticRangeVisitor.hpp
+++ b/src/armnn/StaticRangeVisitor.hpp
@@ -75,6 +75,10 @@ public:
const ConstTensor& input,
const char* name = nullptr) override;
+ void VisitMergerLayer(const IConnectableLayer* layer,
+ const OriginsDescriptor& mergerDescriptor,
+ const char* name = nullptr) override;
+
private:
/// Set the range for an output slot on a layer
void SetRange(const IConnectableLayer* layer, unsigned int outputIdx, float min, float max);
diff --git a/src/armnn/test/QuantizerTest.cpp b/src/armnn/test/QuantizerTest.cpp
index 6820e14f16..a46b443fa7 100644
--- a/src/armnn/test/QuantizerTest.cpp
+++ b/src/armnn/test/QuantizerTest.cpp
@@ -293,7 +293,7 @@ protected:
BOOST_TEST((info.GetQuantizationOffset() == 64));
// Based off parent LeakyReLu [-5.f, 15.f]
- BOOST_CHECK_CLOSE(info.GetQuantizationScale(), 20.0f/255.0f, 0.000001f);
+ BOOST_CHECK_CLOSE(info.GetQuantizationScale(), 20.0f/g_QuantizationBase, g_TestTolerance);
}
};
@@ -977,5 +977,80 @@ BOOST_AUTO_TEST_CASE(QuantizeConstant)
VisitLayersTopologically(quantizedNetwork.get(), validator);
}
+BOOST_AUTO_TEST_CASE(QuantizeMerger)
+{
+ class TestMergerVisitor : public LayerVisitorBase<VisitorThrowingPolicy>
+ {
+ public:
+ TestMergerVisitor(float min, float max) : m_Min(min), m_Max(max) {}
+
+ virtual void VisitInputLayer(const IConnectableLayer* layer,
+ LayerBindingId id,
+ const char* name = nullptr)
+ {}
+ virtual void VisitOutputLayer(const IConnectableLayer* layer,
+ LayerBindingId id,
+ const char* name = nullptr)
+ {}
+ virtual void VisitMergerLayer(const IConnectableLayer* layer,
+ const OriginsDescriptor& mergerDescriptor,
+ const char* name = nullptr)
+ {
+ std::pair<int, float> expectedValues = ComputeQAsymmParams(8, m_Min, m_Max);
+
+ TensorInfo info = layer->GetOutputSlot(0).GetTensorInfo();
+
+ BOOST_TEST((info.GetDataType() == DataType::QuantisedAsymm8));
+
+ BOOST_TEST((info.GetQuantizationOffset() == expectedValues.first));
+
+ BOOST_CHECK_CLOSE(info.GetQuantizationScale(), expectedValues.second, 0.000001f);
+ }
+
+ private:
+ float m_Min;
+ float m_Max;
+ };
+
+ INetworkPtr network = INetwork::Create();
+
+ IConnectableLayer* input0 = network->AddInputLayer(0);
+ IConnectableLayer* input1 = network->AddInputLayer(1);
+ IConnectableLayer* input2 = network->AddInputLayer(2);
+
+ OriginsDescriptor descriptor(3, 1);
+ IConnectableLayer* merger = network->AddMergerLayer(descriptor);
+
+ IConnectableLayer* output0 = network->AddOutputLayer(3);
+
+ // Establish connections
+ input0->GetOutputSlot(0).Connect(merger->GetInputSlot(0));
+ input1->GetOutputSlot(0).Connect(merger->GetInputSlot(1));
+ input2->GetOutputSlot(0).Connect(merger->GetInputSlot(2));
+ merger->GetOutputSlot(0).Connect(output0->GetInputSlot(0));
+
+ // Set TensorInfo
+ TensorShape shape{1U};
+ TensorInfo info(shape, DataType::Float32);
+
+ input0->GetOutputSlot(0).SetTensorInfo(info);
+ input1->GetOutputSlot(0).SetTensorInfo(info);
+ input2->GetOutputSlot(0).SetTensorInfo(info);
+ merger->GetOutputSlot(0).SetTensorInfo(info);
+
+ INetworkQuantizerPtr quantizerPtr = INetworkQuantizer::Create(network.get());
+ // Override the input ranges
+ float min = -15.5f;
+ float max = 45.3f;
+
+ quantizerPtr->OverrideInputRange(0, (min + 2.1f), (max - 3.2f));
+ quantizerPtr->OverrideInputRange(1, (min + 6.7f), max);
+ quantizerPtr->OverrideInputRange(2, min, (max - 7.8f));
+
+ auto quantizedNetwork = quantizerPtr->ExportNetwork();
+ TestMergerVisitor validator(min, max);
+ VisitLayersTopologically(quantizedNetwork.get(), validator);
+}
+
BOOST_AUTO_TEST_SUITE_END()
} // namespace armnn