From b619b42118f54c2db1946aef46477926efa4b75c Mon Sep 17 00:00:00 2001
From: Francis Murtagh
Date: Mon, 27 Jun 2022 12:44:50 +0100
Subject: IVGCVSW-7034 Modified SubgraphView returned by GetWorkingCopy()

* Add virtual GetSlotIndex to IInputSlot
* Fix logic in GetWorkingCopy to use index of slots; so as not to add slots
  to cloned subgraphView if not in original subgraphView
* Add test to cover cases when not all inputSlots to subgraphView layer are
  part of the original subgraphView
* Mark SubgraphView::GetWorkingCopy() as const

Change-Id: I1d540f84c57f97f6c834ec06ca13393ffa55d379
---
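A note on the slot-index approach taken below: the previous working copy pushed
every input slot of a boundary layer, so slots that were never part of the
original SubgraphView leaked into the clone. The rework resolves each boundary
slot on the cloned layer by its slot index, which works because Clone()
preserves the slot layout of a layer. The following minimal, standalone sketch
shows the same idea in isolation; ToyLayer, its Slot type and boundaryInputs
are invented names for illustration only and are not part of the Arm NN API
touched by this patch.

    #include <cassert>
    #include <cstddef>
    #include <memory>
    #include <unordered_map>
    #include <utility>
    #include <vector>

    // Toy stand-in for a layer that owns indexed input slots; a clone keeps
    // the same slot layout, so a boundary slot can be found on the clone by
    // its index alone.
    struct ToyLayer
    {
        struct Slot {};
        explicit ToyLayer(std::size_t numInputs) : inputs(numInputs) {}
        std::vector<Slot> inputs;
    };

    int main()
    {
        // One layer with two input slots; only slot 1 is a subgraph boundary
        // (mirrors the mul layer in the new test case).
        ToyLayer original(2);
        std::vector<std::pair<ToyLayer*, std::size_t>> boundaryInputs = { { &original, 1 } };

        // Clone the layer and remember the original -> clone mapping.
        std::unordered_map<const ToyLayer*, std::unique_ptr<ToyLayer>> clones;
        clones[&original] = std::make_unique<ToyLayer>(original.inputs.size());

        // Old behaviour: copy every slot of the owning layer (slot 0 would leak in).
        // New behaviour: copy only the slot whose index matches the boundary slot.
        std::vector<ToyLayer::Slot*> workingCopyInputs;
        for (const auto& boundary : boundaryInputs)
        {
            ToyLayer& clone = *clones[boundary.first];
            workingCopyInputs.push_back(&clone.inputs[boundary.second]);
        }

        assert(workingCopyInputs.size() == 1); // only the boundary slot is mirrored
        return 0;
    }
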
 src/armnn/Layer.hpp                  |  4 +-
 src/armnn/SubgraphView.cpp           | 91 +++++++++++++----------
 src/armnn/test/SubgraphViewTests.cpp | 93 ++++++++++++++++++++++++++++++++++++
 3 files changed, 129 insertions(+), 59 deletions(-)

(limited to 'src/armnn')

diff --git a/src/armnn/Layer.hpp b/src/armnn/Layer.hpp
index 12c782c965..b144c78889 100644
--- a/src/armnn/Layer.hpp
+++ b/src/armnn/Layer.hpp
@@ -51,7 +51,7 @@ public:
     ~InputSlot();
 
     Layer& GetOwningLayer() const { return m_OwningLayer; }
-    unsigned int GetSlotIndex() const { return m_SlotIndex; }
+    unsigned int GetSlotIndex() const override { return m_SlotIndex; }
 
     const OutputSlot* GetConnectedOutputSlot() const { return m_Connection; }
     OutputSlot* GetConnectedOutputSlot() { return m_Connection; }
@@ -73,7 +73,7 @@ public:
     // Inserts single-output existing layer at this point in the graph.
     void Insert(Layer& layer);
 
-    // IInputSlot
+    // InputSlot
     const IOutputSlot* GetConnection() const override;
     IOutputSlot* GetConnection() override;
 
diff --git a/src/armnn/SubgraphView.cpp b/src/armnn/SubgraphView.cpp
index 5f972a9767..804ff731fb 100644
--- a/src/armnn/SubgraphView.cpp
+++ b/src/armnn/SubgraphView.cpp
@@ -416,7 +416,7 @@ public:
 };
 
 
-SubgraphView SubgraphView::GetWorkingCopy()
+SubgraphView SubgraphView::GetWorkingCopy() const
 {
     if (p_WorkingCopyImpl)
     {
@@ -426,79 +426,63 @@ SubgraphView SubgraphView::GetWorkingCopy()
 
     // Create a cut down SubgraphView with underlying graph containing only the relevant layers.
     // It needs its own underlying layers so that they can be replaced safely.
-    Graph newGraph = Graph();
+    auto ptr = std::make_shared<SubgraphViewWorkingCopy>(Graph());
+
     std::unordered_map<const IConnectableLayer*, IConnectableLayer*> originalToClonedLayerMap;
     std::list<IConnectableLayer*> originalSubgraphLayers = GetIConnectableLayers();
 
-    auto ptr = std::make_shared<SubgraphViewWorkingCopy>(std::move(newGraph));
-    SubgraphView::IInputSlots workingCopyInputs;
-
     for (auto&& originalLayer : originalSubgraphLayers)
     {
         Layer* const layer = PolymorphicDowncast<const Layer*>(originalLayer)->Clone(ptr->m_Graph);
         originalToClonedLayerMap.emplace(originalLayer, layer);
     }
 
+    SubgraphView::IInputSlots workingCopyInputs;
     // Add IInputSlots to workingCopy
-    std::vector<const IConnectableLayer*> processed;
     for (auto originalSubgraphInputSlot : GetIInputSlots())
     {
         const IConnectableLayer& originalSubgraphLayer =
                 PolymorphicDowncast<InputSlot*>(originalSubgraphInputSlot)->GetOwningLayer();
 
-        // Only need process Slots of layer once
-        if (std::find(processed.begin(), processed.end(), &originalSubgraphLayer) == processed.end())
-        {
-            IConnectableLayer* clonedLayer = originalToClonedLayerMap[&originalSubgraphLayer];
+        auto* clonedLayer = originalToClonedLayerMap[&originalSubgraphLayer];
 
-            // Add the InputSlot to WorkingCopy InputSlots
-            for (unsigned int i = 0; i < clonedLayer->GetNumInputSlots(); i++)
-            {
-                workingCopyInputs.push_back(&clonedLayer->GetInputSlot(i));
-            }
-            processed.push_back(&originalSubgraphLayer);
-        }
+        workingCopyInputs.push_back(&clonedLayer->GetInputSlot(originalSubgraphInputSlot->GetSlotIndex()));
     }
-    // Empty processed
-    processed.clear();
 
     for (auto originalSubgraphLayer : originalSubgraphLayers)
    {
         IConnectableLayer* const clonedLayer = originalToClonedLayerMap[originalSubgraphLayer];
 
-        // connect all cloned layers as per original subgraph
-        for (unsigned int i = 0; i < clonedLayer->GetNumOutputSlots(); i++)
+        // OutputLayers have no OutputSlots to be connected
+        if (clonedLayer->GetType() != LayerType::Output)
         {
-            // OutputLayers have no OutputSlots to be connected
-            if (clonedLayer->GetType() != LayerType::Output)
+            // connect all cloned layers as per original subgraph
+            for (unsigned int i = 0; i < clonedLayer->GetNumOutputSlots(); i++)
             {
-                auto& outputSlot = clonedLayer->GetOutputSlot(i);
-                for (unsigned int k = 0; k < originalSubgraphLayer->GetNumOutputSlots(); k++)
+                auto& originalOutputSlot = originalSubgraphLayer->GetOutputSlot(i);
+                auto& clonedOutputSlot = clonedLayer->GetOutputSlot(i);
+                for (unsigned int j = 0; j < originalOutputSlot.GetNumConnections(); j++)
                 {
-                    auto& originalOutputSlot = originalSubgraphLayer->GetOutputSlot(k);
-                    for (unsigned int j = 0; j < originalOutputSlot.GetNumConnections(); j++)
+                    // nextLayer is the layer with IInputSlot connected to IOutputSlot we are working on
+                    const IConnectableLayer& nextLayerOnOriginalSubgraph =
+                            originalOutputSlot.GetConnection(j)->GetOwningIConnectableLayer();
+
+                    // Check the layer is in our map and so has a clonedLayer
+                    if (originalToClonedLayerMap.find(&nextLayerOnOriginalSubgraph) != originalToClonedLayerMap.end())
                     {
-                        // nextLayer is the layer with IInputSlot connected to IOutputSlot we are working on
-                        const IConnectableLayer& nextLayer =
-                            originalOutputSlot.GetConnection(j)->GetOwningIConnectableLayer();
-
-                        // Check the layer is in our map and so has a clonedLayer
-                        if (originalToClonedLayerMap.find(&nextLayer) != originalToClonedLayerMap.end())
-                        {
-                            IConnectableLayer* newGraphTargetLayer = originalToClonedLayerMap[&nextLayer];
-
-                            IInputSlot& inputSlot =
-                                newGraphTargetLayer->GetInputSlot(
-                                    PolymorphicDowncast<OutputSlot*>(
-                                        &originalOutputSlot)->GetConnection(j)->GetSlotIndex());
-
-                            // Then make the connection
-                            outputSlot.Connect(inputSlot);
-                        }
+                        auto* nextLayerOnClonedSubgraph = originalToClonedLayerMap[&nextLayerOnOriginalSubgraph];
+
+                        auto index = PolymorphicDowncast<OutputSlot*>(
+                                     &originalOutputSlot)->GetConnection(j)->GetSlotIndex();
+
+                        IInputSlot& inputSlot = nextLayerOnClonedSubgraph->GetInputSlot(index);
+
+                        // Then make the connection
+                        clonedOutputSlot.Connect(inputSlot);
                     }
-                    // Copy the tensorInfo to the clonedOutputSlot
-                    outputSlot.SetTensorInfo(originalOutputSlot.GetTensorInfo());
                 }
+                // Copy the tensorInfo to the clonedOutputSlot
+                clonedOutputSlot.SetTensorInfo(originalOutputSlot.GetTensorInfo());
             }
         }
     }
@@ -508,25 +492,18 @@ SubgraphView SubgraphView::GetWorkingCopy()
     // Add IOutputSlots to workingCopy
     for (auto outputSlot : GetIOutputSlots())
     {
-
+        auto outputSlotIndex = outputSlot->CalculateIndexOnOwner();
         const IConnectableLayer& originalSubgraphLayer = outputSlot->GetOwningIConnectableLayer();
 
         // OutputLayers have no OutputSlots to be connected
-        // Only need process Slots of layer once
-        if (originalSubgraphLayer.GetType() != LayerType::Output &&
-            std::find(processed.begin(), processed.end(), &originalSubgraphLayer) == processed.end())
+        if (originalSubgraphLayer.GetType() != LayerType::Output)
         {
             IConnectableLayer* clonedLayer = originalToClonedLayerMap[&originalSubgraphLayer];
 
-            // Add the OutputSlot to WorkingCopy InputSlots
-            for (unsigned int i = 0; i < clonedLayer->GetNumOutputSlots(); i++)
-            {
-                workingCopyOutputs.push_back(&clonedLayer->GetOutputSlot(i));
-            }
-            processed.push_back(&originalSubgraphLayer);
+            // Add the OutputSlot of clonedLayer to WorkingCopy OutputSlots
+            workingCopyOutputs.push_back(&clonedLayer->GetOutputSlot(outputSlotIndex));
         }
     }
-    processed.clear();
 
     SubgraphView::IConnectableLayers workingCopyLayers;
     for (auto& pair : originalToClonedLayerMap)
diff --git a/src/armnn/test/SubgraphViewTests.cpp b/src/armnn/test/SubgraphViewTests.cpp
index feeea5d478..e1181004d9 100644
--- a/src/armnn/test/SubgraphViewTests.cpp
+++ b/src/armnn/test/SubgraphViewTests.cpp
@@ -2291,4 +2291,97 @@ TEST_CASE("SubgraphViewWorkingCopyReplaceSlots")
     );
 }
 
+TEST_CASE("SubgraphViewWorkingCopyCloneInputAndOutputSlots")
+{
+    Graph graph;
+
+    const TensorInfo inputInfo({ 1, 8, 8, 16 }, DataType::QAsymmU8, 1.0f, 0);
+    const TensorInfo constInfo({ 1, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0, true);
+    const TensorInfo outputInfo({ 1, 8, 8, 16 }, DataType::QAsymmU8, 1.0f, 0);
+
+    std::vector<uint8_t> constData(constInfo.GetNumElements(), 0);
+    std::iota(constData.begin(), constData.end(), 0);
+    ConstTensor constTensor(constInfo, constData);
+
+    // Add the original pattern
+    IConnectableLayer* input = graph.AddLayer<InputLayer>(0, "input");
+    auto constant = graph.AddLayer<ConstantLayer>("const");
+
+    constant->m_LayerOutput = std::make_shared<ScopedTensorHandle>(constTensor);
+    IConnectableLayer* mul = graph.AddLayer<MultiplicationLayer>("mul");
+    armnn::ViewsDescriptor splitterDesc(2,4);
+    IConnectableLayer* split = graph.AddLayer<SplitterLayer>(splitterDesc, "split");
+    IConnectableLayer* abs = graph.AddLayer<ActivationLayer>(ActivationFunction::Abs, "abs");
+    IConnectableLayer* relu = graph.AddLayer<ActivationLayer>(ActivationFunction::ReLu, "relu");
+    armnn::OriginsDescriptor concatDesc(2, 4);
+    IConnectableLayer* concat = graph.AddLayer<ConcatLayer>(concatDesc, "constant");
+    IConnectableLayer* output = graph.AddLayer<OutputLayer>(0, "output");
+
+    // Create connections between layers
+    input->GetOutputSlot(0).SetTensorInfo(inputInfo);
+    constant->GetOutputSlot(0).SetTensorInfo(constInfo);
+    mul->GetOutputSlot(0).SetTensorInfo(outputInfo);
+
+    input->GetOutputSlot(0).Connect(mul->GetInputSlot(1));
+    constant->GetOutputSlot(0).Connect(mul->GetInputSlot(0));
+    mul->GetOutputSlot(0).Connect(split->GetInputSlot(0));
+    split->GetOutputSlot(0).Connect(abs->GetInputSlot(0));
+    split->GetOutputSlot(1).Connect(relu->GetInputSlot(0));
+    abs->GetOutputSlot(0).Connect(concat->GetInputSlot(0));
+    relu->GetOutputSlot(0).Connect(concat->GetInputSlot(1));
+    concat->GetOutputSlot(0).Connect(output->GetInputSlot(0));
+
+    //   constant    input   //
+    //       \        /      //
+    //          mul          //
+    //           |           //
+    //        splitter       //
+    //        /      \       //
+    //      abs      relu    //
+    //        \      /       //
+    //         concat        //
+    //           |           //
+    //         output        //
+    //                       //
+    // SubgraphView layers: constant mul splitter abs
+
+    // Add just the InputSlot connected to the InputLayer to the SubgraphView's InputSlots
+    SubgraphView::IInputSlots inputSlots;
+    inputSlots.push_back(&mul->GetInputSlot(1));
+
+    // Add just the OutputSlots of the splitter and abs to the SubgraphView's OutputSlots
+    SubgraphView::IOutputSlots outputSlots;
+    outputSlots.push_back(&split->GetOutputSlot(1));
+    outputSlots.push_back(&abs->GetOutputSlot(0));
+
+    // Add in out of order
+    auto view = CreateSubgraphViewFrom({constant, mul, split, abs},
+                                       std::move(inputSlots),
+                                       std::move(outputSlots));
+
+    SubgraphView workingCopy = view->GetWorkingCopy();
+
+    // Check that only 1 input slot is added.
+    CHECK(workingCopy.GetIInputSlots().size() == 1);
+    CHECK(workingCopy.GetIInputSlots()[0]->GetSlotIndex() == 1);
+
+    CHECK(workingCopy.GetIOutputSlots().size() == 2);
+    CHECK(workingCopy.GetIOutputSlots()[0]->GetOwningIConnectableLayer().GetType() == armnn::LayerType::Splitter);
+    CHECK(workingCopy.GetIOutputSlots()[1]->GetOwningIConnectableLayer().GetType() == armnn::LayerType::Activation);
+
+    // Check the WorkingCopy is as expected before replacement
+    CHECK(workingCopy.GetIConnectableLayers().size() == 4);
+    int idx=0;
+    LayerType expectedSorted[] = {LayerType::Constant,
+                                  LayerType::Multiplication,
+                                  LayerType::Splitter,
+                                  LayerType::Activation};
+    workingCopy.ForEachIConnectableLayer([&idx, &expectedSorted](const IConnectableLayer* l)
+        {
+            CHECK((expectedSorted[idx] == l->GetType()));
+            idx++;
+        }
+    );
+}
+
 }
--
cgit v1.2.1