diff options
Diffstat (limited to 'src/armnn/NetworkQuantizer.cpp')
-rw-r--r-- | src/armnn/NetworkQuantizer.cpp | 6 |
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/src/armnn/NetworkQuantizer.cpp b/src/armnn/NetworkQuantizer.cpp index eed3f41bdc..06d8c5d0f2 100644 --- a/src/armnn/NetworkQuantizer.cpp +++ b/src/armnn/NetworkQuantizer.cpp @@ -50,7 +50,7 @@ void INetworkQuantizer::Destroy(INetworkQuantizer *quantizer) void NetworkQuantizer::OverrideInputRange(LayerBindingId layerId, float min, float max) { - const Graph& graph = PolymorphicDowncast<const Network*>(m_InputNetwork)->GetGraph(); + const Graph& graph = m_InputNetwork->pNetworkImpl->GetGraph(); auto inputLayers = graph.GetInputLayers(); // Walk the input layers of the graph and override the quantization parameters of the one with the given id @@ -69,7 +69,7 @@ void NetworkQuantizer::Refine(const InputTensors& inputTensors) { m_RefineCount = 0; m_Ranges.SetDynamicMode(true); - const Graph& cGraph = PolymorphicDowncast<const Network*>(m_InputNetwork)->GetGraph().TopologicalSort(); + const Graph& cGraph = m_InputNetwork->pNetworkImpl->GetGraph().TopologicalSort(); // need to insert Debug layers in the DynamicQuantizationStrategy Graph& graph = const_cast<Graph&>(cGraph); @@ -136,7 +136,7 @@ void NetworkQuantizer::Refine(const InputTensors& inputTensors) INetworkPtr NetworkQuantizer::ExportNetwork() { - const Graph& graph = PolymorphicDowncast<const Network*>(m_InputNetwork)->GetGraph().TopologicalSort(); + const Graph& graph = m_InputNetwork->pNetworkImpl->GetGraph().TopologicalSort(); // Step 1) Walk the graph and populate default min/max values for // intermediate tensors, only if Runtime does not exist (created |