aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorCathal Corbett <cathal.corbett@arm.com>2021-12-15 17:12:59 +0000
committerCathal Corbett <cathal.corbett@arm.com>2021-12-23 13:21:22 +0000
commitcbfd718464b8ac41f0338ae6565d8213d24c0a2a (patch)
treef26da835108a0ed52ac0ffc8f7ebec64827b5033
parent81edc6217f76953c0be4c47f3d005cf48772ccb7 (diff)
downloadarmnn-cbfd718464b8ac41f0338ae6565d8213d24c0a2a.tar.gz
IVGCVSW-6632 OptimizationViews: has INetwork rather than Graph for holding layers
* Deprecate the GetGraph() function in OptimizationViews & remove/fix occurrences where OptimizationViews.GetGraph() is called. * OptimizationViews has member INetworkPtr. * OptimizationViews has GetINetwork() method. * Unit test added to OptimizationViewsTests.cpp. Signed-off-by: Cathal Corbett <cathal.corbett@arm.com> Change-Id: Ifc1e53f1c34d786502279631942f0472f401038e
-rw-r--r--include/armnn/INetwork.hpp3
-rw-r--r--include/armnn/backends/OptimizationViews.hpp8
-rw-r--r--src/armnn/Network.cpp20
-rw-r--r--src/armnn/Network.hpp3
-rw-r--r--src/armnn/test/SubgraphViewTests.cpp8
-rw-r--r--src/backends/aclCommon/ArmComputeSubgraphUtils.hpp222
-rw-r--r--src/backends/backendsCommon/test/MockBackend.cpp12
-rw-r--r--src/backends/backendsCommon/test/OptimizationViewsTests.cpp71
-rw-r--r--src/backends/cl/ClBackend.cpp72
-rw-r--r--src/backends/neon/NeonBackend.cpp81
10 files changed, 378 insertions, 122 deletions
diff --git a/include/armnn/INetwork.hpp b/include/armnn/INetwork.hpp
index 4da01a62db..3e607256ec 100644
--- a/include/armnn/INetwork.hpp
+++ b/include/armnn/INetwork.hpp
@@ -424,7 +424,8 @@ public:
/// @return - Interface for configuring the layer.
IConnectableLayer* AddPrecompiledLayer(const PreCompiledDescriptor& preCompiledDescriptor,
CompiledBlobPtr& compiledBlobPtr,
- const Optional<BackendId>& backend);
+ const Optional<BackendId>& backend,
+ const char* name = nullptr);
/// Adds an activation layer to the network.
/// @param activationDescriptor - ActivationDescriptor to configure the activation.
diff --git a/include/armnn/backends/OptimizationViews.hpp b/include/armnn/backends/OptimizationViews.hpp
index c357c0c8bb..f3479febd3 100644
--- a/include/armnn/backends/OptimizationViews.hpp
+++ b/include/armnn/backends/OptimizationViews.hpp
@@ -56,8 +56,11 @@ public:
bool Validate(const SubgraphView& originalSubgraph) const;
+ ARMNN_DEPRECATED_MSG_REMOVAL_DATE("GetGraph is deprecated. Use GetINetwork instead.", "22.08")
Graph& GetGraph() { return m_Graph; }
+ INetworkPtr& GetINetwork() { return m_INetwork; }
+
private:
Substitutions m_SuccesfulOptimizations; ///< Proposed substitutions from successful optimizations
Subgraphs m_FailedOptimizations; ///< Subgraphs from the original subgraph which cannot be supported
@@ -65,6 +68,11 @@ private:
/// Graph object used only as a container for any layer generated by the optimization process
Graph m_Graph;
+
+ /// INetworkPtr object used only as a container for any layer generated by the optimization process
+ /// Also, can use to AddPrecompiledLayer to the SubstitutionPair
+ /// Use in favour of m_Graph, which is deprecated and scheduled for removal in 22.08
+ INetworkPtr m_INetwork = INetwork::Create();;
};
} //namespace armnn
diff --git a/src/armnn/Network.cpp b/src/armnn/Network.cpp
index 3198c22d9f..c0cfe426d2 100644
--- a/src/armnn/Network.cpp
+++ b/src/armnn/Network.cpp
@@ -215,10 +215,11 @@ IConnectableLayer* INetwork::AddPooling3dLayer(const Pooling3dDescriptor& poolin
}
IConnectableLayer* INetwork::AddPrecompiledLayer(const PreCompiledDescriptor& preCompiledDescriptor,
- CompiledBlobPtr& compiledBlobPtr,
- const Optional<BackendId>& backend)
+ CompiledBlobPtr& compiledBlobPtr,
+ const Optional<BackendId>& backend,
+ const char* name)
{
- return pNetworkImpl->AddPrecompiledLayer(preCompiledDescriptor, compiledBlobPtr, backend);
+ return pNetworkImpl->AddPrecompiledLayer(preCompiledDescriptor, compiledBlobPtr, backend, name);
}
IConnectableLayer* INetwork::AddActivationLayer(const ActivationDescriptor& activationDescriptor,
@@ -2772,10 +2773,19 @@ IConnectableLayer* NetworkImpl::AddUnidirectionalSequenceLstmLayer(
IConnectableLayer* NetworkImpl::AddPrecompiledLayer(const PreCompiledDescriptor& preCompiledDescriptor,
CompiledBlobPtr& compiledBlobPtr,
- const Optional<BackendId>& backend)
+ const Optional<BackendId>& backend,
+ const char* name)
{
// Method use is for backend users.
- const auto layer = m_Graph->AddLayer<PreCompiledLayer>(preCompiledDescriptor, "pre-compiled");
+ PreCompiledLayer* layer;
+ if (name)
+ {
+ layer = m_Graph->AddLayer<PreCompiledLayer>(preCompiledDescriptor, name);
+ }
+ else
+ {
+ layer = m_Graph->AddLayer<PreCompiledLayer>(preCompiledDescriptor, "pre-compiled");
+ }
// Assign the pre-compiled object to layer
// Pass only one compiled network, Arm NN does not handle multiple
diff --git a/src/armnn/Network.hpp b/src/armnn/Network.hpp
index 3fdc140099..db9c374c3b 100644
--- a/src/armnn/Network.hpp
+++ b/src/armnn/Network.hpp
@@ -172,7 +172,8 @@ public:
IConnectableLayer* AddPrecompiledLayer(const PreCompiledDescriptor& preCompiledDescriptor,
CompiledBlobPtr& compiledBlobPtr,
- const Optional<BackendId>& backend);
+ const Optional<BackendId>& backend,
+ const char* name = nullptr);
IConnectableLayer* AddPreluLayer(const char* name = nullptr);
diff --git a/src/armnn/test/SubgraphViewTests.cpp b/src/armnn/test/SubgraphViewTests.cpp
index a1a57c1b56..639e832b87 100644
--- a/src/armnn/test/SubgraphViewTests.cpp
+++ b/src/armnn/test/SubgraphViewTests.cpp
@@ -200,7 +200,9 @@ TEST_CASE("SingleInputSingleOutputAddPrecompiledLayerSubstituteSubgraph1")
// Construct dummy pre-compiled layer
INetworkPtr network = INetwork::Create();
- IConnectableLayer* preCompiledLayer = network->AddPrecompiledLayer(preCompiledDescriptor, compiledBlobPtr, backend);
+ IConnectableLayer* preCompiledLayer = network->AddPrecompiledLayer(preCompiledDescriptor,
+ compiledBlobPtr,
+ backend);
// Substitute sub-graph with pre-compiled layer
graph.SubstituteSubgraph(*subgraph, preCompiledLayer);
@@ -242,7 +244,9 @@ TEST_CASE("SingleInputSingleOutputAddPrecompiledLayerSubstituteSubgraph2")
// Construct dummy pre-compiled layer
INetworkPtr network = INetwork::Create();
- IConnectableLayer* preCompiledLayer = network->AddPrecompiledLayer(preCompiledDescriptor, compiledBlobPtr, backend);
+ IConnectableLayer* preCompiledLayer = network->AddPrecompiledLayer(preCompiledDescriptor,
+ compiledBlobPtr,
+ backend);
SubgraphView substituteSubgraph(preCompiledLayer);
// Substitute sub-graph with pre-compiled layer
diff --git a/src/backends/aclCommon/ArmComputeSubgraphUtils.hpp b/src/backends/aclCommon/ArmComputeSubgraphUtils.hpp
index 521c17cd62..4367de1e28 100644
--- a/src/backends/aclCommon/ArmComputeSubgraphUtils.hpp
+++ b/src/backends/aclCommon/ArmComputeSubgraphUtils.hpp
@@ -91,15 +91,14 @@ inline void ReportUntouchedLayers(OptimizationViews& optimizationViews, std::map
}
template<typename LayerType>
-LayerType* FuseLayerWithoutParameters(OptimizationViews& optimizationViews,
- LayerType* baseLayer,
- ActivationLayer* activationLayer,
- ActivationDescriptor& activationDesc,
- std::string name)
+LayerType* FuseLayer(OptimizationViews& optimizationViews,
+ LayerType* baseLayer,
+ LayerType* replacementLayer,
+ ActivationLayer* activationLayer,
+ ActivationDescriptor& activationDesc)
{
- LayerType* replacementLayer = optimizationViews.GetGraph().AddLayer<LayerType>(name.c_str());
-
- replacementLayer->SetAdditionalInfoForObject(std::make_shared<ActivationDescriptor>(activationDesc));
+ replacementLayer->SetAdditionalInfoForObject(
+ std::make_shared<ActivationDescriptor>(activationDesc));
SubgraphView substitutionSubgraph(CreateInputsFrom({baseLayer}),
CreateOutputsFrom({activationLayer}),
@@ -107,42 +106,204 @@ LayerType* FuseLayerWithoutParameters(OptimizationViews& optimizationViews,
SubgraphView replacementSubgraph(replacementLayer);
optimizationViews.AddSubstitution({substitutionSubgraph, replacementSubgraph});
+
+ return replacementLayer;
+}
+
+template<typename LayerType>
+LayerType* FuseAdditionLayer(OptimizationViews& optimizationViews,
+ LayerType* baseLayer,
+ ActivationLayer* activationLayer,
+ ActivationDescriptor& activationDesc,
+ std::string name)
+{
+ IConnectableLayer* replacement = optimizationViews.GetINetwork()->AddAdditionLayer(name.c_str());
+ LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement);
+
+ FuseLayer(optimizationViews,
+ baseLayer,
+ replacementLayer,
+ activationLayer,
+ activationDesc);
+
+ return replacementLayer;
+}
+
+template<typename LayerType>
+LayerType* FuseSubtractionLayer(OptimizationViews& optimizationViews,
+ LayerType* baseLayer,
+ ActivationLayer* activationLayer,
+ ActivationDescriptor& activationDesc,
+ std::string name)
+{
+ IConnectableLayer* replacement = optimizationViews.GetINetwork()->AddSubtractionLayer(name.c_str());
+ LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement);
+
+ FuseLayer(optimizationViews,
+ baseLayer,
+ replacementLayer,
+ activationLayer,
+ activationDesc);
+
return replacementLayer;
}
template<typename LayerType>
-LayerType* FuseLayerWithParameters(OptimizationViews& optimizationViews,
+LayerType* FuseDivisionLayer(OptimizationViews& optimizationViews,
+ LayerType* baseLayer,
+ ActivationLayer* activationLayer,
+ ActivationDescriptor& activationDesc,
+ std::string name)
+{
+ IConnectableLayer* replacement = optimizationViews.GetINetwork()->AddDivisionLayer(name.c_str());
+ LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement);
+
+ FuseLayer(optimizationViews,
+ baseLayer,
+ replacementLayer,
+ activationLayer,
+ activationDesc);
+
+ return replacementLayer;
+}
+
+template<typename LayerType>
+LayerType* FuseMultiplicationLayer(OptimizationViews& optimizationViews,
LayerType* baseLayer,
ActivationLayer* activationLayer,
ActivationDescriptor& activationDesc,
std::string name)
{
- LayerType* replacementLayer = optimizationViews.GetGraph().AddLayer<LayerType>(baseLayer->GetParameters(),
- name.c_str());
+ IConnectableLayer* replacement = optimizationViews.GetINetwork()->AddMultiplicationLayer(name.c_str());
+ LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement);
- replacementLayer->SetAdditionalInfoForObject(std::make_shared<ActivationDescriptor>(activationDesc));
+ FuseLayer(optimizationViews,
+ baseLayer,
+ replacementLayer,
+ activationLayer,
+ activationDesc);
- SubgraphView substitutionSubgraph(CreateInputsFrom({baseLayer}),
- CreateOutputsFrom({activationLayer}),
- {baseLayer, activationLayer});
- SubgraphView replacementSubgraph(replacementLayer);
+ return replacementLayer;
+}
+
+template<typename LayerType>
+LayerType* FuseBatchNormalizationLayer(OptimizationViews& optimizationViews,
+ LayerType* baseLayer,
+ ActivationLayer* activationLayer,
+ ActivationDescriptor& activationDesc,
+ std::string name)
+{
+ IConnectableLayer* replacement =
+ optimizationViews.GetINetwork()->AddBatchNormalizationLayer(baseLayer->GetParameters(),
+ ConstTensor(),
+ ConstTensor(),
+ ConstTensor(),
+ ConstTensor(),
+ name.c_str());
+ LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement);
+
+ FuseLayer(optimizationViews,
+ baseLayer,
+ replacementLayer,
+ activationLayer,
+ activationDesc);
- optimizationViews.AddSubstitution({substitutionSubgraph, replacementSubgraph});
return replacementLayer;
}
template<typename LayerType>
-LayerType* FuseLayerWithWeightsAndBiases(OptimizationViews& optimizationViews,
- LayerType* baseLayer,
- ActivationLayer* activationLayer,
- ActivationDescriptor& activationDesc,
- std::string name)
-{
- LayerType* replacementLayer = FuseLayerWithParameters(optimizationViews,
- baseLayer,
- activationLayer,
- activationDesc,
- name);
+LayerType* FuseConvolution2dLayer(OptimizationViews& optimizationViews,
+ LayerType* baseLayer,
+ ActivationLayer* activationLayer,
+ ActivationDescriptor& activationDesc,
+ std::string name)
+{
+ std::shared_ptr<ConstTensorHandle> weightHandle = baseLayer->m_Weight;
+ TensorInfo weightInfo = weightHandle->GetTensorInfo();
+
+ std::shared_ptr<ConstTensorHandle> biasHandle = baseLayer->m_Bias;
+ ConstTensor biasTensor;
+ if (!biasHandle)
+ {
+ biasTensor = ConstTensor();
+ }
+ else
+ {
+ biasTensor = ConstTensor(biasHandle->GetTensorInfo(), biasHandle->Map(true));
+ }
+
+ IConnectableLayer* replacement =
+ optimizationViews.GetINetwork()->
+ AddConvolution2dLayer(baseLayer->GetParameters(),
+ ConstTensor(weightInfo, weightHandle->Map(true)),
+ Optional<ConstTensor>(biasTensor),
+ name.c_str());
+ LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement);
+
+ FuseLayer(optimizationViews,
+ baseLayer,
+ replacementLayer,
+ activationLayer,
+ activationDesc);
+
+ return replacementLayer;
+}
+
+template<typename LayerType>
+LayerType* FuseDepthwiseConvolution2dLayer(OptimizationViews& optimizationViews,
+ LayerType* baseLayer,
+ ActivationLayer* activationLayer,
+ ActivationDescriptor& activationDesc,
+ std::string name)
+{
+ std::shared_ptr<ConstTensorHandle> weightHandle = baseLayer->m_Weight;
+ TensorInfo weightInfo = weightHandle->GetTensorInfo();
+
+ std::shared_ptr<ConstTensorHandle> biasHandle = baseLayer->m_Bias;
+ ConstTensor biasTensor;
+ if (!biasHandle)
+ {
+ biasTensor = ConstTensor();
+ }
+ else
+ {
+ biasTensor = ConstTensor(biasHandle->GetTensorInfo(), biasHandle->Map(true));
+ }
+
+ IConnectableLayer* replacement =
+ optimizationViews.GetINetwork()->
+ AddDepthwiseConvolution2dLayer(baseLayer->GetParameters(),
+ ConstTensor(weightInfo, weightHandle->Map(true)),
+ Optional<ConstTensor>(biasTensor),
+ name.c_str());
+ LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement);
+
+ FuseLayer(optimizationViews,
+ baseLayer,
+ replacementLayer,
+ activationLayer,
+ activationDesc);
+
+ return replacementLayer;
+}
+
+template<typename LayerType>
+LayerType* FuseFullyConnectedLayer(OptimizationViews& optimizationViews,
+ LayerType* baseLayer,
+ ActivationLayer* activationLayer,
+ ActivationDescriptor& activationDesc,
+ std::string name)
+{
+ IConnectableLayer* replacement =
+ optimizationViews.GetINetwork()->AddFullyConnectedLayer(baseLayer->GetParameters(),
+ name.c_str());
+ LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement);
+
+ FuseLayer(optimizationViews,
+ baseLayer,
+ replacementLayer,
+ activationLayer,
+ activationDesc);
replacementLayer->m_Weight = std::move(baseLayer->m_Weight);
replacementLayer->m_Bias = std::move(baseLayer->m_Bias);
@@ -187,8 +348,9 @@ std::vector<Layer*> ChainReduceLayers(OptimizationViews& optimizationViews,
// Add new layer to graph.
std::string layerName = "reduce_layer_" + std::to_string(i);
- Layer* replacementLayer = optimizationViews.GetGraph().AddLayer<LayerType>(newReduceDescriptor,
- layerName.c_str());
+ Layer* replacementLayer = PolymorphicDowncast<Layer*>(
+ optimizationViews.GetINetwork()->AddReduceLayer(newReduceDescriptor,
+ layerName.c_str()));
// Connect previous layer with new layer.
// The first and last layer will be connected when the subgraph is replaced.
if (!layers.empty())
diff --git a/src/backends/backendsCommon/test/MockBackend.cpp b/src/backends/backendsCommon/test/MockBackend.cpp
index 4bdb8ce786..a34cfe98fe 100644
--- a/src/backends/backendsCommon/test/MockBackend.cpp
+++ b/src/backends/backendsCommon/test/MockBackend.cpp
@@ -187,12 +187,16 @@ OptimizationViews MockBackend::OptimizeSubgraphView(const SubgraphView& subgraph
{
ARMNN_ASSERT(supportedSubgraph != nullptr);
- PreCompiledLayer* preCompiledLayer =
- optimizationViews.GetGraph().AddLayer<PreCompiledLayer>(
+ CompiledBlobPtr blobPtr;
+ BackendId backend = MockBackendId();
+
+ IConnectableLayer* preCompiledLayer =
+ optimizationViews.GetINetwork()->AddPrecompiledLayer(
PreCompiledDescriptor(supportedSubgraph->GetNumInputSlots(),
supportedSubgraph->GetNumOutputSlots()),
- "pre-compiled");
- preCompiledLayer->SetBackendId(MockBackendId());
+ blobPtr,
+ backend,
+ nullptr);
SubgraphView substitutionSubgraph(*supportedSubgraph);
SubgraphView replacementSubgraph(preCompiledLayer);
diff --git a/src/backends/backendsCommon/test/OptimizationViewsTests.cpp b/src/backends/backendsCommon/test/OptimizationViewsTests.cpp
index bbae229927..c40c5131a8 100644
--- a/src/backends/backendsCommon/test/OptimizationViewsTests.cpp
+++ b/src/backends/backendsCommon/test/OptimizationViewsTests.cpp
@@ -55,7 +55,7 @@ TEST_CASE("OptimizedViewsSubgraphLayerCount")
{
OptimizationViews view;
// Construct a graph with 3 layers
- Graph& baseGraph = view.GetGraph();
+ Graph baseGraph;
Layer* const inputLayer = baseGraph.AddLayer<InputLayer>(0, "input");
@@ -119,11 +119,78 @@ TEST_CASE("OptimizedViewsSubgraphLayerCount")
CHECK(view.Validate(*originalSubgraph));
}
+
+TEST_CASE("OptimizedViewsSubgraphLayerCountUsingGetINetwork")
+{
+ OptimizationViews view;
+
+ IConnectableLayer* const inputLayer = view.GetINetwork()->AddInputLayer(0, "input");
+
+ DepthwiseConvolution2dDescriptor convDescriptor;
+ PreCompiledDescriptor substitutionLayerDescriptor(1, 1);
+ CompiledBlobPtr blobPtr;
+ BackendId backend = Compute::CpuRef;
+
+ Layer* convLayer1 = PolymorphicDowncast<Layer*>(
+ view.GetINetwork()->AddDepthwiseConvolution2dLayer(convDescriptor,
+ ConstTensor(),
+ Optional<ConstTensor>(),
+ "conv1"));
+
+ Layer* convLayer2 = PolymorphicDowncast<Layer*>(
+ view.GetINetwork()->AddDepthwiseConvolution2dLayer(convDescriptor,
+ ConstTensor(),
+ Optional<ConstTensor>(),
+ "conv2"));
+
+ IConnectableLayer* const outputLayer = view.GetINetwork()->AddOutputLayer(0, "output");
+
+ inputLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
+ convLayer1->GetOutputSlot(0).Connect(convLayer2->GetInputSlot(0));
+ convLayer2->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
+
+ // Subgraph for a failed layer
+ SubgraphViewSelector::SubgraphViewPtr failedSubgraph = CreateSubgraphViewFrom(CreateInputsFrom({convLayer1}),
+ CreateOutputsFrom({convLayer1}),
+ {convLayer1});
+ // Subgraph for an untouched layer
+ SubgraphViewSelector::SubgraphViewPtr untouchedSubgraph = CreateSubgraphViewFrom(CreateInputsFrom({convLayer2}),
+ CreateOutputsFrom({convLayer2}),
+ {convLayer2});
+
+ // Create a Network containing a layer to substitute in
+ NetworkImpl net;
+ Layer* substitutionpreCompiledLayer = PolymorphicDowncast<Layer*>(
+ net.AddPrecompiledLayer(substitutionLayerDescriptor, blobPtr, backend));
+
+ // Subgraph for a substitution layer
+ SubgraphViewSelector::SubgraphViewPtr substitutionSubgraph =
+ CreateSubgraphViewFrom(CreateInputsFrom({substitutionpreCompiledLayer}),
+ CreateOutputsFrom({substitutionpreCompiledLayer}),
+ {substitutionpreCompiledLayer});
+
+ view.AddFailedSubgraph(SubgraphView(*failedSubgraph));
+ view.AddUntouchedSubgraph(SubgraphView(*untouchedSubgraph));
+
+ SubgraphViewSelector::SubgraphViewPtr baseSubgraph = CreateSubgraphViewFrom(CreateInputsFrom({convLayer1}),
+ CreateOutputsFrom({convLayer2}),
+ {substitutionpreCompiledLayer});
+ view.AddSubstitution({*baseSubgraph, *substitutionSubgraph});
+
+ // Construct original subgraph to compare against
+ SubgraphViewSelector::SubgraphViewPtr originalSubgraph =
+ CreateSubgraphViewFrom(CreateInputsFrom({convLayer1}),
+ CreateOutputsFrom({convLayer2}),
+ {convLayer1, convLayer2, substitutionpreCompiledLayer});
+
+ CHECK(view.Validate(*originalSubgraph));
+}
+
TEST_CASE("OptimizedViewsSubgraphLayerCountFailValidate")
{
OptimizationViews view;
// Construct a graph with 3 layers
- Graph& baseGraph = view.GetGraph();
+ Graph baseGraph;
Layer* const inputLayer = baseGraph.AddLayer<InputLayer>(0, "input");
diff --git a/src/backends/cl/ClBackend.cpp b/src/backends/cl/ClBackend.cpp
index 5c582695e5..339c1aa398 100644
--- a/src/backends/cl/ClBackend.cpp
+++ b/src/backends/cl/ClBackend.cpp
@@ -304,11 +304,11 @@ OptimizationViews ClBackend::OptimizeSubgraphView(const SubgraphView& subgraph,
if (status)
{
- FuseLayerWithWeightsAndBiases<Convolution2dLayer>(optimizationViews,
- baseLayer,
- activationLayer,
- activationDesc,
- name);
+ FuseConvolution2dLayer<Convolution2dLayer>(optimizationViews,
+ baseLayer,
+ activationLayer,
+ activationDesc,
+ name);
untouched.erase(baseLayer->GetGuid());
untouched.erase(activationLayer->GetGuid());
}
@@ -335,11 +335,11 @@ OptimizationViews ClBackend::OptimizeSubgraphView(const SubgraphView& subgraph,
if (status)
{
- FuseLayerWithWeightsAndBiases<DepthwiseConvolution2dLayer>(optimizationViews,
- baseLayer,
- activationLayer,
- activationDesc,
- name);
+ FuseDepthwiseConvolution2dLayer<DepthwiseConvolution2dLayer>(optimizationViews,
+ baseLayer,
+ activationLayer,
+ activationDesc,
+ name);
untouched.erase(baseLayer->GetGuid());
untouched.erase(activationLayer->GetGuid());
}
@@ -358,11 +358,11 @@ OptimizationViews ClBackend::OptimizeSubgraphView(const SubgraphView& subgraph,
if (status)
{
- FuseLayerWithWeightsAndBiases<FullyConnectedLayer>(optimizationViews,
- baseLayer,
- activationLayer,
- activationDesc,
- name);
+ FuseFullyConnectedLayer<FullyConnectedLayer>(optimizationViews,
+ baseLayer,
+ activationLayer,
+ activationDesc,
+ name);
untouched.erase(baseLayer->GetGuid());
untouched.erase(activationLayer->GetGuid());
}
@@ -385,7 +385,7 @@ OptimizationViews ClBackend::OptimizeSubgraphView(const SubgraphView& subgraph,
if (status)
{
BatchNormalizationLayer* replacementLayer =
- FuseLayerWithParameters<BatchNormalizationLayer>(optimizationViews,
+ FuseBatchNormalizationLayer<BatchNormalizationLayer>(optimizationViews,
baseLayer,
activationLayer,
activationDesc,
@@ -411,11 +411,11 @@ OptimizationViews ClBackend::OptimizeSubgraphView(const SubgraphView& subgraph,
if (status)
{
- FuseLayerWithoutParameters<AdditionLayer>(optimizationViews,
- baseLayer,
- activationLayer,
- activationDesc,
- name);
+ FuseAdditionLayer<AdditionLayer>(optimizationViews,
+ baseLayer,
+ activationLayer,
+ activationDesc,
+ name);
untouched.erase(baseLayer->GetGuid());
untouched.erase(activationLayer->GetGuid());
}
@@ -432,11 +432,11 @@ OptimizationViews ClBackend::OptimizeSubgraphView(const SubgraphView& subgraph,
if (status)
{
- FuseLayerWithoutParameters<DivisionLayer>(optimizationViews,
- baseLayer,
- activationLayer,
- activationDesc,
- name);
+ FuseDivisionLayer<DivisionLayer>(optimizationViews,
+ baseLayer,
+ activationLayer,
+ activationDesc,
+ name);
untouched.erase(baseLayer->GetGuid());
untouched.erase(activationLayer->GetGuid());
}
@@ -453,11 +453,11 @@ OptimizationViews ClBackend::OptimizeSubgraphView(const SubgraphView& subgraph,
if (status)
{
- FuseLayerWithoutParameters<MultiplicationLayer>(optimizationViews,
- baseLayer,
- activationLayer,
- activationDesc,
- name);
+ FuseMultiplicationLayer<MultiplicationLayer>(optimizationViews,
+ baseLayer,
+ activationLayer,
+ activationDesc,
+ name);
untouched.erase(baseLayer->GetGuid());
untouched.erase(activationLayer->GetGuid());
}
@@ -474,11 +474,11 @@ OptimizationViews ClBackend::OptimizeSubgraphView(const SubgraphView& subgraph,
if (status)
{
- FuseLayerWithoutParameters<SubtractionLayer>(optimizationViews,
- baseLayer,
- activationLayer,
- activationDesc,
- name);
+ FuseSubtractionLayer<SubtractionLayer>(optimizationViews,
+ baseLayer,
+ activationLayer,
+ activationDesc,
+ name);
untouched.erase(baseLayer->GetGuid());
untouched.erase(activationLayer->GetGuid());
}
diff --git a/src/backends/neon/NeonBackend.cpp b/src/backends/neon/NeonBackend.cpp
index 0500ee3411..aa5ba03075 100644
--- a/src/backends/neon/NeonBackend.cpp
+++ b/src/backends/neon/NeonBackend.cpp
@@ -196,11 +196,11 @@ OptimizationViews NeonBackend::OptimizeSubgraphView(const SubgraphView& subgraph
if (status)
{
- FuseLayerWithWeightsAndBiases<Convolution2dLayer>(optimizationViews,
- baseLayer,
- activationLayer,
- activationDesc,
- name);
+ FuseConvolution2dLayer<Convolution2dLayer>(optimizationViews,
+ baseLayer,
+ activationLayer,
+ activationDesc,
+ name);
untouched.erase(baseLayer->GetGuid());
untouched.erase(activationLayer->GetGuid());
}
@@ -227,11 +227,11 @@ OptimizationViews NeonBackend::OptimizeSubgraphView(const SubgraphView& subgraph
if (status)
{
- FuseLayerWithWeightsAndBiases<DepthwiseConvolution2dLayer>(optimizationViews,
- baseLayer,
- activationLayer,
- activationDesc,
- name);
+ FuseDepthwiseConvolution2dLayer<DepthwiseConvolution2dLayer>(optimizationViews,
+ baseLayer,
+ activationLayer,
+ activationDesc,
+ name);
untouched.erase(baseLayer->GetGuid());
untouched.erase(activationLayer->GetGuid());
}
@@ -250,11 +250,11 @@ OptimizationViews NeonBackend::OptimizeSubgraphView(const SubgraphView& subgraph
if (status)
{
- FuseLayerWithWeightsAndBiases<FullyConnectedLayer>(optimizationViews,
- baseLayer,
- activationLayer,
- activationDesc,
- name);
+ FuseFullyConnectedLayer<FullyConnectedLayer>(optimizationViews,
+ baseLayer,
+ activationLayer,
+ activationDesc,
+ name);
untouched.erase(baseLayer->GetGuid());
untouched.erase(activationLayer->GetGuid());
}
@@ -277,12 +277,11 @@ OptimizationViews NeonBackend::OptimizeSubgraphView(const SubgraphView& subgraph
if (status)
{
BatchNormalizationLayer* replacementLayer =
- FuseLayerWithParameters<BatchNormalizationLayer>(
- optimizationViews,
- baseLayer,
- activationLayer,
- activationDesc,
- name);
+ FuseBatchNormalizationLayer<BatchNormalizationLayer>(optimizationViews,
+ baseLayer,
+ activationLayer,
+ activationDesc,
+ name);
replacementLayer->m_Beta = std::move(baseLayer->m_Beta);
replacementLayer->m_Gamma = std::move(baseLayer->m_Gamma);
@@ -304,11 +303,11 @@ OptimizationViews NeonBackend::OptimizeSubgraphView(const SubgraphView& subgraph
if (status)
{
- FuseLayerWithoutParameters<AdditionLayer>(optimizationViews,
- baseLayer,
- activationLayer,
- activationDesc,
- name);
+ FuseAdditionLayer<AdditionLayer>(optimizationViews,
+ baseLayer,
+ activationLayer,
+ activationDesc,
+ name);
untouched.erase(baseLayer->GetGuid());
untouched.erase(activationLayer->GetGuid());
}
@@ -325,11 +324,11 @@ OptimizationViews NeonBackend::OptimizeSubgraphView(const SubgraphView& subgraph
if (status)
{
- FuseLayerWithoutParameters<DivisionLayer>(optimizationViews,
- baseLayer,
- activationLayer,
- activationDesc,
- name);
+ FuseDivisionLayer<DivisionLayer>(optimizationViews,
+ baseLayer,
+ activationLayer,
+ activationDesc,
+ name);
untouched.erase(baseLayer->GetGuid());
untouched.erase(activationLayer->GetGuid());
}
@@ -346,11 +345,11 @@ OptimizationViews NeonBackend::OptimizeSubgraphView(const SubgraphView& subgraph
if (status)
{
- FuseLayerWithoutParameters<MultiplicationLayer>(optimizationViews,
- baseLayer,
- activationLayer,
- activationDesc,
- name);
+ FuseMultiplicationLayer<MultiplicationLayer>(optimizationViews,
+ baseLayer,
+ activationLayer,
+ activationDesc,
+ name);
untouched.erase(baseLayer->GetGuid());
untouched.erase(activationLayer->GetGuid());
}
@@ -367,11 +366,11 @@ OptimizationViews NeonBackend::OptimizeSubgraphView(const SubgraphView& subgraph
if (status)
{
- FuseLayerWithoutParameters<SubtractionLayer>(optimizationViews,
- baseLayer,
- activationLayer,
- activationDesc,
- name);
+ FuseSubtractionLayer<SubtractionLayer>(optimizationViews,
+ baseLayer,
+ activationLayer,
+ activationDesc,
+ name);
untouched.erase(baseLayer->GetGuid());
untouched.erase(activationLayer->GetGuid());
}