path: root/src/armnn/NetworkQuantizer.cpp
author    Francis Murtagh <francis.murtagh@arm.com>  2021-02-15 18:23:17 +0000
committer Francis Murtagh <francis.murtagh@arm.com>  2021-02-15 18:23:17 +0000
commit    3d2b4b2bff3be27f12a99e0e01284078870ee954 (patch)
tree      33a9ea2a3267707088fd0a4a727d73a4568bb0a6 /src/armnn/NetworkQuantizer.cpp
parent    052fbe9c86628cfdc534c515d9b451aa8d3d1cb6 (diff)
IVGCVSW-4873 Implement Pimpl Idiom for INetwork and IOptimizedNetwork
!android-nn-driver:5042

Signed-off-by: Kevin May <kevin.may@arm.com>
Change-Id: Ia1ce8b839e81b46428ba0f78463e085e5906958d
Signed-off-by: Francis Murtagh <francis.murtagh@arm.com>
Signed-off-by: Finn Williams <Finn.Williams@arm.com>
Diffstat (limited to 'src/armnn/NetworkQuantizer.cpp')
-rw-r--r--  src/armnn/NetworkQuantizer.cpp | 6
1 file changed, 3 insertions(+), 3 deletions(-)
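For orientation, a minimal sketch of the Pimpl (pointer-to-implementation) shape this commit gives INetwork. Only the pNetworkImpl member and GetGraph() are visible in the diff below; the class layout, access levels and the rest of the interface are assumptions for illustration, not the exact Arm NN headers.

    #include <memory>

    namespace armnn
    {

    class Graph;        // internal graph representation (forward declaration)

    class NetworkImpl   // concrete implementation, kept out of the public interface
    {
    public:
        const Graph& GetGraph() const;
        // ... layer construction and other internals (elided in this sketch) ...
    };

    class INetwork
    {
    public:
        // ... public network-building interface ...

        // The implementation object travels with the interface handle, so internal
        // callers such as NetworkQuantizer can reach the Graph through it instead
        // of downcasting the INetwork* to the concrete Network class.
        std::unique_ptr<NetworkImpl> pNetworkImpl;
    };

    } // namespace armnn

This is the change visible in each hunk below: the PolymorphicDowncast to Network* is replaced by a direct hop through pNetworkImpl.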
diff --git a/src/armnn/NetworkQuantizer.cpp b/src/armnn/NetworkQuantizer.cpp
index eed3f41bdc..06d8c5d0f2 100644
--- a/src/armnn/NetworkQuantizer.cpp
+++ b/src/armnn/NetworkQuantizer.cpp
@@ -50,7 +50,7 @@ void INetworkQuantizer::Destroy(INetworkQuantizer *quantizer)
void NetworkQuantizer::OverrideInputRange(LayerBindingId layerId, float min, float max)
{
- const Graph& graph = PolymorphicDowncast<const Network*>(m_InputNetwork)->GetGraph();
+ const Graph& graph = m_InputNetwork->pNetworkImpl->GetGraph();
auto inputLayers = graph.GetInputLayers();
// Walk the input layers of the graph and override the quantization parameters of the one with the given id
@@ -69,7 +69,7 @@ void NetworkQuantizer::Refine(const InputTensors& inputTensors)
{
m_RefineCount = 0;
m_Ranges.SetDynamicMode(true);
- const Graph& cGraph = PolymorphicDowncast<const Network*>(m_InputNetwork)->GetGraph().TopologicalSort();
+ const Graph& cGraph = m_InputNetwork->pNetworkImpl->GetGraph().TopologicalSort();
// need to insert Debug layers in the DynamicQuantizationStrategy
Graph& graph = const_cast<Graph&>(cGraph);
@@ -136,7 +136,7 @@ void NetworkQuantizer::Refine(const InputTensors& inputTensors)
INetworkPtr NetworkQuantizer::ExportNetwork()
{
- const Graph& graph = PolymorphicDowncast<const Network*>(m_InputNetwork)->GetGraph().TopologicalSort();
+ const Graph& graph = m_InputNetwork->pNetworkImpl->GetGraph().TopologicalSort();
// Step 1) Walk the graph and populate default min/max values for
// intermediate tensors, only if Runtime does not exist (created
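For context, a hedged sketch of how the three quantizer entry points touched in this diff (OverrideInputRange, Refine, ExportNetwork) are typically driven. The Create() factory, the header paths and the argument values are assumptions based on the INetworkQuantizer interface, not code shown in the diff.

    #include <vector>

    #include <armnn/INetwork.hpp>
    #include <armnn/INetworkQuantizer.hpp>   // assumed header location

    // Calibration-style flow: optionally pin an input range, refine the dynamic
    // ranges with representative data, then export the quantized network.
    armnn::INetworkPtr QuantizeSketch(armnn::INetwork* inputNetwork,
                                      const std::vector<armnn::InputTensors>& calibrationData)
    {
        auto quantizer = armnn::INetworkQuantizer::Create(inputNetwork);   // assumed factory

        // Clamp the quantization range of the input layer with binding id 0
        // (illustrative values).
        quantizer->OverrideInputRange(0, -1.0f, 1.0f);

        // Feed representative inputs so the dynamic ranges can be refined.
        for (const auto& inputs : calibrationData)
        {
            quantizer->Refine(inputs);
        }

        // Produce the quantized network for subsequent optimisation and loading.
        return quantizer->ExportNetwork();
    }

All three functions reach the input network's Graph the same way after this patch: through m_InputNetwork->pNetworkImpl rather than a downcast.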