Diffstat (limited to 'src/armnn/test')
-rw-r--r--  src/armnn/test/CreateWorkload.hpp                      78
-rw-r--r--  src/armnn/test/GraphTests.cpp                          12
-rw-r--r--  src/armnn/test/NetworkTests.cpp                        28
-rw-r--r--  src/armnn/test/QuantizerTest.cpp                       30
-rw-r--r--  src/armnn/test/SubgraphViewTests.cpp                   64
-rw-r--r--  src/armnn/test/TestNameAndDescriptorLayerVisitor.cpp   24
-rw-r--r--  src/armnn/test/TestNameAndDescriptorLayerVisitor.hpp   42
7 files changed, 135 insertions(+), 143 deletions(-)
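
Every hunk below applies the same mechanical rename from the deprecated Merger API to Concat: MergerLayer/MergerWorkload become ConcatLayer/ConcatWorkload, visitor and test names follow, and call sites drop the deprecation-warning macros. As an orientation aid only (assembled from calls that appear in the hunks below, not an excerpt of any single test), the call-site pattern is:

    // Pattern as it appears inside a test body; net is an armnn::Network.
    armnn::OriginsDescriptor concatDesc(2, 4);

    // Before: deprecated Merger API, silenced with the deprecation macros.
    ARMNN_NO_DEPRECATE_WARN_BEGIN
    armnn::IConnectableLayer* mergerLayer = net.AddMergerLayer(concatDesc, "merger layer");
    ARMNN_NO_DEPRECATE_WARN_END

    // After: AddConcatLayer takes the same OriginsDescriptor directly.
    armnn::IConnectableLayer* concatLayer = net.AddConcatLayer(concatDesc, "concat layer");
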
diff --git a/src/armnn/test/CreateWorkload.hpp b/src/armnn/test/CreateWorkload.hpp
index be52eadb57..135a4421cd 100644
--- a/src/armnn/test/CreateWorkload.hpp
+++ b/src/armnn/test/CreateWorkload.hpp
@@ -706,10 +706,10 @@ std::unique_ptr<SplitterWorkload>
return workload;
}
-/// This function constructs a graph with both a splitter and a merger, and returns a pair of the workloads.
-template<typename SplitterWorkload, typename MergerWorkload, armnn::DataType DataType>
-std::pair<std::unique_ptr<SplitterWorkload>, std::unique_ptr<MergerWorkload>>
- CreateSplitterMergerWorkloadTest(armnn::IWorkloadFactory& factory, armnn::Graph& graph)
+/// This function constructs a graph with both a splitter and a concat, and returns a pair of the workloads.
+template<typename SplitterWorkload, typename ConcatWorkload, armnn::DataType DataType>
+std::pair<std::unique_ptr<SplitterWorkload>, std::unique_ptr<ConcatWorkload>>
+ CreateSplitterConcatWorkloadTest(armnn::IWorkloadFactory &factory, armnn::Graph &graph)
{
armnn::TensorInfo inputTensorInfo({ 1, 2, 100, 10 }, DataType);
@@ -733,41 +733,41 @@ std::pair<std::unique_ptr<SplitterWorkload>, std::unique_ptr<MergerWorkload>>
Layer* const splitter = graph.AddLayer<SplitterLayer>(splitterViews, "splitter");
BOOST_TEST_CHECKPOINT("created splitter layer");
- armnn::OriginsDescriptor mergerViews(2);
- mergerViews.SetViewOriginCoord(0, 0, 0);
- mergerViews.SetViewOriginCoord(0, 1, 1);
- mergerViews.SetViewOriginCoord(0, 2, 0);
- mergerViews.SetViewOriginCoord(0, 3, 0);
+ armnn::OriginsDescriptor concatViews(2);
+ concatViews.SetViewOriginCoord(0, 0, 0);
+ concatViews.SetViewOriginCoord(0, 1, 1);
+ concatViews.SetViewOriginCoord(0, 2, 0);
+ concatViews.SetViewOriginCoord(0, 3, 0);
- mergerViews.SetViewOriginCoord(1, 0, 0);
- mergerViews.SetViewOriginCoord(1, 1, 0);
- mergerViews.SetViewOriginCoord(1, 2, 0);
- mergerViews.SetViewOriginCoord(1, 3, 0);
+ concatViews.SetViewOriginCoord(1, 0, 0);
+ concatViews.SetViewOriginCoord(1, 1, 0);
+ concatViews.SetViewOriginCoord(1, 2, 0);
+ concatViews.SetViewOriginCoord(1, 3, 0);
- Layer* const merger = graph.AddLayer<MergerLayer>(mergerViews, "merger");
- BOOST_TEST_CHECKPOINT("created merger layer");
+ Layer* const concat = graph.AddLayer<ConcatLayer>(concatViews, "concat");
+ BOOST_TEST_CHECKPOINT("created concat layer");
Layer* const output = graph.AddLayer<OutputLayer>(0, "output");
// Adds connections.
Connect(input, splitter, inputTensorInfo, 0, 0);
BOOST_TEST_CHECKPOINT("connect input to splitter");
- Connect(splitter, merger, splitTensorInfo1, 0, 1); // The splitter & merger are connected up.
- BOOST_TEST_CHECKPOINT("connect splitter[0] to merger[1]");
- Connect(splitter, merger, splitTensorInfo2, 1, 0); // So that the outputs are flipped round.
- BOOST_TEST_CHECKPOINT("connect splitter[1] to merger[0]");
- Connect(merger, output, inputTensorInfo, 0, 0);
- BOOST_TEST_CHECKPOINT("connect merger to output");
+ Connect(splitter, concat, splitTensorInfo1, 0, 1); // The splitter & concat are connected up.
+ BOOST_TEST_CHECKPOINT("connect splitter[0] to concat[1]");
+ Connect(splitter, concat, splitTensorInfo2, 1, 0); // So that the outputs are flipped round.
+ BOOST_TEST_CHECKPOINT("connect splitter[1] to concat[0]");
+ Connect(concat, output, inputTensorInfo, 0, 0);
+ BOOST_TEST_CHECKPOINT("connect concat to output");
CreateTensorHandles(graph, factory);
BOOST_TEST_CHECKPOINT("created tensor handles");
auto workloadSplitter = MakeAndCheckWorkload<SplitterWorkload>(*splitter, graph, factory);
BOOST_TEST_CHECKPOINT("created splitter workload");
- auto workloadMerger = MakeAndCheckWorkload<MergerWorkload>(*merger, graph, factory);
- BOOST_TEST_CHECKPOINT("created merger workload");
+ auto workloadConcat = MakeAndCheckWorkload<ConcatWorkload>(*concat, graph, factory);
+ BOOST_TEST_CHECKPOINT("created concat workload");
- return {std::move(workloadSplitter), std::move(workloadMerger)};
+ return {std::move(workloadSplitter), std::move(workloadConcat)};
}
@@ -1053,10 +1053,10 @@ std::unique_ptr<MeanWorkload> CreateMeanWorkloadTest(armnn::IWorkloadFactory& fa
return workload;
}
-template<typename MergerWorkload, armnn::DataType DataType>
-std::unique_ptr<MergerWorkload> CreateMergerWorkloadTest(armnn::IWorkloadFactory& factory,
- armnn::Graph& graph,
- const armnn::TensorShape& outputShape,
+template<typename ConcatWorkload, armnn::DataType DataType>
+std::unique_ptr<ConcatWorkload> CreateConcatWorkloadTest(armnn::IWorkloadFactory &factory,
+ armnn::Graph &graph,
+ const armnn::TensorShape &outputShape,
unsigned int concatAxis)
{
armnn::TensorInfo inputTensorInfo({ 2, 3, 2, 5 }, DataType);
@@ -1073,26 +1073,26 @@ std::unique_ptr<MergerWorkload> CreateMergerWorkloadTest(armnn::IWorkloadFactory
inputShapes.end(),
concatAxis);
- Layer* const merger = graph.AddLayer<MergerLayer>(descriptor, "merger");
- BOOST_TEST_CHECKPOINT("created merger layer");
+ Layer* const concat = graph.AddLayer<ConcatLayer>(descriptor, "concat");
+ BOOST_TEST_CHECKPOINT("created concat layer");
Layer* const output = graph.AddLayer<OutputLayer>(0, "output");
// Adds connections.
- Connect(input0, merger, inputTensorInfo, 0, 0);
- BOOST_TEST_CHECKPOINT("connect input0 to merger");
- Connect(input1, merger, inputTensorInfo, 0, 1);
- BOOST_TEST_CHECKPOINT("connect input1 to merger");
- Connect(merger, output, outputTensorInfo, 0, 0);
- BOOST_TEST_CHECKPOINT("connect merger to output");
+ Connect(input0, concat, inputTensorInfo, 0, 0);
+ BOOST_TEST_CHECKPOINT("connect input0 to concat");
+ Connect(input1, concat, inputTensorInfo, 0, 1);
+ BOOST_TEST_CHECKPOINT("connect input1 to concat");
+ Connect(concat, output, outputTensorInfo, 0, 0);
+ BOOST_TEST_CHECKPOINT("connect concat to output");
CreateTensorHandles(graph, factory);
BOOST_TEST_CHECKPOINT("created tensor handles");
- auto workloadMerger = MakeAndCheckWorkload<MergerWorkload>(*merger, graph, factory);
- BOOST_TEST_CHECKPOINT("created merger workload");
+ auto workloadConcat = MakeAndCheckWorkload<ConcatWorkload>(*concat, graph, factory);
+ BOOST_TEST_CHECKPOINT("created concat workload");
- return std::move(workloadMerger);
+ return std::move(workloadConcat);
}
template <typename PreCompiledWorkload, armnn::DataType dataType>
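
Note on the two renamed helpers above (CreateSplitterConcatWorkloadTest and CreateConcatWorkloadTest): they are consumed by backend workload tests. A hedged sketch of such a caller follows; BackendConcatWorkload and BackendWorkloadFactory are placeholder names for backend-specific types that are outside this diff.

    // Hypothetical caller of CreateConcatWorkloadTest; the workload and factory
    // types are placeholders, not real ArmNN backend names.
    template <typename BackendConcatWorkload, typename BackendWorkloadFactory>
    void ConcatAxisZeroWorkloadTest()
    {
        armnn::Graph graph;
        BackendWorkloadFactory factory;

        // The helper builds two { 2, 3, 2, 5 } inputs; concatenating on axis 0
        // therefore yields a { 4, 3, 2, 5 } output.
        auto workload = CreateConcatWorkloadTest<BackendConcatWorkload, armnn::DataType::Float32>(
            factory, graph, armnn::TensorShape({ 4, 3, 2, 5 }), 0);

        BOOST_CHECK(workload != nullptr);
    }
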
diff --git a/src/armnn/test/GraphTests.cpp b/src/armnn/test/GraphTests.cpp
index cca4653509..0777d98d3a 100644
--- a/src/armnn/test/GraphTests.cpp
+++ b/src/armnn/test/GraphTests.cpp
@@ -454,18 +454,18 @@ struct CopyLayersFixture
convLayer1->GetOutputSlot(0).Connect(convLayer2->GetInputSlot(0));
- armnn::OriginsDescriptor mergerDefaults(2);
- Layer* const mergerLayer = AddLayer<MergerLayer>(mergerDefaults, "merger");
- mergerLayer->SetBackendId(armnn::Compute::CpuRef);
+ armnn::OriginsDescriptor concatDefaults(2);
+ Layer* const concatLayer = AddLayer<ConcatLayer>(concatDefaults, "concat");
+ concatLayer->SetBackendId(armnn::Compute::CpuRef);
- convLayer1->GetOutputSlot(0).Connect(mergerLayer->GetInputSlot(0));
- convLayer2->GetOutputSlot(0).Connect(mergerLayer->GetInputSlot(1));
+ convLayer1->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(0));
+ convLayer2->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(1));
armnn::ActivationDescriptor activationDefaults;
Layer* const actLayer = AddLayer<ActivationLayer>(activationDefaults, "act");
actLayer->SetBackendId(armnn::Compute::CpuRef);
- mergerLayer->GetOutputSlot(0).Connect(actLayer->GetInputSlot(0));
+ concatLayer->GetOutputSlot(0).Connect(actLayer->GetInputSlot(0));
armnn::SoftmaxDescriptor softmaxDefaults;
Layer* const softmaxLayer = AddLayer<SoftmaxLayer>(softmaxDefaults, "softmax");
diff --git a/src/armnn/test/NetworkTests.cpp b/src/armnn/test/NetworkTests.cpp
index 47fd67b8d4..14b67a1f4a 100644
--- a/src/armnn/test/NetworkTests.cpp
+++ b/src/armnn/test/NetworkTests.cpp
@@ -226,7 +226,7 @@ BOOST_AUTO_TEST_CASE(NetworkModification)
checkOneOutputToOneInputConnection(multiplicationLayer, outputLayer, 2, 0);
}
-BOOST_AUTO_TEST_CASE(NetworkModification_SplitterMerger)
+BOOST_AUTO_TEST_CASE(NetworkModification_SplitterConcat)
{
armnn::Network net;
@@ -255,22 +255,20 @@ BOOST_AUTO_TEST_CASE(NetworkModification_SplitterMerger)
splitterLayer->GetOutputSlot(1).Connect(softmaxLayer2->GetInputSlot(0));
- // Adds a merger layer.
- armnn::OriginsDescriptor mergerDesc(2, 4);
+ // Adds a concat layer.
+ armnn::OriginsDescriptor concatDesc(2, 4);
- ARMNN_NO_DEPRECATE_WARN_BEGIN
- armnn::IConnectableLayer* mergerLayer = net.AddMergerLayer(mergerDesc, "merger layer");
- ARMNN_NO_DEPRECATE_WARN_END
- BOOST_TEST(mergerLayer);
+ armnn::IConnectableLayer* concatLayer = net.AddConcatLayer(concatDesc, "concat layer");
+ BOOST_TEST(concatLayer);
- softmaxLayer1->GetOutputSlot(0).Connect(mergerLayer->GetInputSlot(0));
- softmaxLayer2->GetOutputSlot(0).Connect(mergerLayer->GetInputSlot(1));
+ softmaxLayer1->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(0));
+ softmaxLayer2->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(1));
// Adds an output layer.
armnn::IConnectableLayer* outputLayer = net.AddOutputLayer(0, "output layer");
BOOST_TEST(outputLayer);
- mergerLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
+ concatLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
BOOST_TEST(splitterLayer->GetNumOutputSlots() == 2);
BOOST_TEST(splitterLayer->GetOutputSlot(0).GetConnection(0) == &softmaxLayer1->GetInputSlot(0));
@@ -278,11 +276,11 @@ BOOST_AUTO_TEST_CASE(NetworkModification_SplitterMerger)
BOOST_TEST(splitterLayer->GetOutputSlot(1).GetConnection(0) == &softmaxLayer2->GetInputSlot(0));
BOOST_TEST(&splitterLayer->GetOutputSlot(1) == softmaxLayer2->GetInputSlot(0).GetConnection());
- BOOST_TEST(mergerLayer->GetNumInputSlots() == 2);
- BOOST_TEST(softmaxLayer1->GetOutputSlot(0).GetConnection(0) == &mergerLayer->GetInputSlot(0));
- BOOST_TEST(&softmaxLayer1->GetOutputSlot(0) == mergerLayer->GetInputSlot(0).GetConnection());
- BOOST_TEST(softmaxLayer2->GetOutputSlot(0).GetConnection(0) == &mergerLayer->GetInputSlot(1));
- BOOST_TEST(&softmaxLayer2->GetOutputSlot(0) == mergerLayer->GetInputSlot(1).GetConnection());
+ BOOST_TEST(concatLayer->GetNumInputSlots() == 2);
+ BOOST_TEST(softmaxLayer1->GetOutputSlot(0).GetConnection(0) == &concatLayer->GetInputSlot(0));
+ BOOST_TEST(&softmaxLayer1->GetOutputSlot(0) == concatLayer->GetInputSlot(0).GetConnection());
+ BOOST_TEST(softmaxLayer2->GetOutputSlot(0).GetConnection(0) == &concatLayer->GetInputSlot(1));
+ BOOST_TEST(&softmaxLayer2->GetOutputSlot(0) == concatLayer->GetInputSlot(1).GetConnection());
}
BOOST_AUTO_TEST_CASE(NetworkModification_SplitterAddition)
diff --git a/src/armnn/test/QuantizerTest.cpp b/src/armnn/test/QuantizerTest.cpp
index f2c739d274..337c61585f 100644
--- a/src/armnn/test/QuantizerTest.cpp
+++ b/src/armnn/test/QuantizerTest.cpp
@@ -1238,15 +1238,15 @@ BOOST_AUTO_TEST_CASE(QuantizeConstant)
VisitLayersTopologically(quantizedNetworkQSymm16.get(), validatorQSymm16);
}
-BOOST_AUTO_TEST_CASE(QuantizeMerger)
+BOOST_AUTO_TEST_CASE(QuantizeConcat)
{
- class TestMergerQuantization : public TestQuantization
+ class TestConcatQuantization : public TestQuantization
{
public:
- TestMergerQuantization(const TensorShape& inputShape, const TensorShape& outputShape)
+ TestConcatQuantization(const TensorShape& inputShape, const TensorShape& outputShape)
: TestQuantization(inputShape, outputShape) {}
- TestMergerQuantization(const QuantizerOptions& options,
+ TestConcatQuantization(const QuantizerOptions& options,
const TensorShape& inputShape,
const TensorShape& outputShape)
: TestQuantization(options, inputShape, outputShape) {}
@@ -1259,8 +1259,8 @@ BOOST_AUTO_TEST_CASE(QuantizeMerger)
LayerBindingId id,
const char* name = nullptr) override
{}
- void VisitMergerLayer(const IConnectableLayer* layer,
- const OriginsDescriptor& mergerDescriptor,
+ void VisitConcatLayer(const IConnectableLayer* layer,
+ const OriginsDescriptor& originsDescriptor,
const char* name = nullptr) override
{
TensorInfo info = layer->GetOutputSlot(0).GetTensorInfo();
@@ -1277,17 +1277,15 @@ BOOST_AUTO_TEST_CASE(QuantizeMerger)
IConnectableLayer* input2 = network->AddInputLayer(2);
OriginsDescriptor descriptor(3, 1);
- ARMNN_NO_DEPRECATE_WARN_BEGIN
- IConnectableLayer* merger = network->AddMergerLayer(descriptor);
- ARMNN_NO_DEPRECATE_WARN_END
+ IConnectableLayer* concatLayer = network->AddConcatLayer(descriptor);
IConnectableLayer* output0 = network->AddOutputLayer(3);
// Establish connections
- input0->GetOutputSlot(0).Connect(merger->GetInputSlot(0));
- input1->GetOutputSlot(0).Connect(merger->GetInputSlot(1));
- input2->GetOutputSlot(0).Connect(merger->GetInputSlot(2));
- merger->GetOutputSlot(0).Connect(output0->GetInputSlot(0));
+ input0->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(0));
+ input1->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(1));
+ input2->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(2));
+ concatLayer->GetOutputSlot(0).Connect(output0->GetInputSlot(0));
// Set TensorInfo
const TensorShape shape{1U};
@@ -1296,7 +1294,7 @@ BOOST_AUTO_TEST_CASE(QuantizeMerger)
input0->GetOutputSlot(0).SetTensorInfo(info);
input1->GetOutputSlot(0).SetTensorInfo(info);
input2->GetOutputSlot(0).SetTensorInfo(info);
- merger->GetOutputSlot(0).SetTensorInfo(info);
+ concatLayer->GetOutputSlot(0).SetTensorInfo(info);
const QuantizerOptions options(DataType::QuantisedSymm16);
INetworkQuantizerPtr quantizerPtrQAsymm8 = INetworkQuantizer::Create(network.get());
@@ -1314,11 +1312,11 @@ BOOST_AUTO_TEST_CASE(QuantizeMerger)
quantizerPtrQSymm16->OverrideInputRange(2, min, (max - 7.8f));
INetworkPtr quantizedNetworkQAsymm8 = quantizerPtrQAsymm8->ExportNetwork();
- TestMergerQuantization validatorQAsymm8(shape, shape);
+ TestConcatQuantization validatorQAsymm8(shape, shape);
VisitLayersTopologically(quantizedNetworkQAsymm8.get(), validatorQAsymm8);
INetworkPtr quantizedNetworkQSymm16 = quantizerPtrQSymm16->ExportNetwork();
- TestMergerQuantization validatorQSymm16(options, shape, shape);
+ TestConcatQuantization validatorQSymm16(options, shape, shape);
VisitLayersTopologically(quantizedNetworkQSymm16.get(), validatorQSymm16);
}
diff --git a/src/armnn/test/SubgraphViewTests.cpp b/src/armnn/test/SubgraphViewTests.cpp
index 8369fc4c79..3e762e2de5 100644
--- a/src/armnn/test/SubgraphViewTests.cpp
+++ b/src/armnn/test/SubgraphViewTests.cpp
@@ -223,21 +223,21 @@ BOOST_AUTO_TEST_CASE(MultiInputSingleOutput)
Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");
- OriginsDescriptor mergerDescriptor(2);
- Layer* const mergerLayer = graph.AddLayer<MergerLayer>(mergerDescriptor, "merger");
+ OriginsDescriptor concatDescriptor(2);
+ Layer* const concatLayer = graph.AddLayer<ConcatLayer>(concatDescriptor, "concat");
Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, "output");
inputLayer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0));
splitterLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
splitterLayer->GetOutputSlot(1).Connect(convLayer2->GetInputSlot(0));
- convLayer1->GetOutputSlot(0).Connect(mergerLayer->GetInputSlot(0));
- convLayer2->GetOutputSlot(0).Connect(mergerLayer->GetInputSlot(1));
- mergerLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
+ convLayer1->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(0));
+ convLayer2->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(1));
+ concatLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
// Construct sub-graph
SubgraphViewSelector::SubgraphViewPtr subgraph = CreateSubgraphViewFrom(CreateInputsFrom({convLayer1, convLayer2}),
- CreateOutputsFrom({mergerLayer}),
+ CreateOutputsFrom({concatLayer}),
{});
// Save sub-graph connections for comparison after substitution
@@ -270,8 +270,8 @@ BOOST_AUTO_TEST_CASE(SingleInputMultiOutput)
Convolution2dDescriptor convDescriptor;
Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");
- OriginsDescriptor mergerDescriptor(2);
- Layer* const mergerLayer = graph.AddLayer<MergerLayer>(mergerDescriptor, "merger");
+ OriginsDescriptor concatDescriptor(2);
+ Layer* const concatLayer = graph.AddLayer<ConcatLayer>(concatDescriptor, "concat");
Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, "output");
ViewsDescriptor splitterDescriptor(2);
@@ -280,9 +280,9 @@ BOOST_AUTO_TEST_CASE(SingleInputMultiOutput)
inputLayer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0));
splitterLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
splitterLayer->GetOutputSlot(1).Connect(convLayer2->GetInputSlot(0));
- convLayer1->GetOutputSlot(0).Connect(mergerLayer->GetInputSlot(0));
- convLayer2->GetOutputSlot(0).Connect(mergerLayer->GetInputSlot(1));
- mergerLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
+ convLayer1->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(0));
+ convLayer2->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(1));
+ concatLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
// Construct sub-graph
SubgraphViewSelector::SubgraphViewPtr subgraph = CreateSubgraphViewFrom(CreateInputsFrom({splitterLayer}),
@@ -323,17 +323,17 @@ BOOST_AUTO_TEST_CASE(MultiInputMultiOutput)
Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");
- OriginsDescriptor mergerDescriptor(2);
- Layer* const mergerLayer = graph.AddLayer<MergerLayer>(mergerDescriptor, "merger");
+ OriginsDescriptor concatDescriptor(2);
+ Layer* const concatLayer = graph.AddLayer<ConcatLayer>(concatDescriptor, "concat");
Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, "output");
inputLayer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0));
splitterLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
splitterLayer->GetOutputSlot(1).Connect(convLayer2->GetInputSlot(0));
- convLayer1->GetOutputSlot(0).Connect(mergerLayer->GetInputSlot(0));
- convLayer2->GetOutputSlot(0).Connect(mergerLayer->GetInputSlot(1));
- mergerLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
+ convLayer1->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(0));
+ convLayer2->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(1));
+ concatLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
// Construct sub-graph
SubgraphViewSelector::SubgraphViewPtr subgraph = CreateSubgraphViewFrom(CreateInputsFrom({convLayer1, convLayer2}),
@@ -376,8 +376,8 @@ BOOST_AUTO_TEST_CASE(EraseReplacedLayers)
Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");
- OriginsDescriptor mergerDescriptor(2);
- Layer* const mergerLayer = graph.AddLayer<MergerLayer>(mergerDescriptor, "merger");
+ OriginsDescriptor concatDescriptor(2);
+ Layer* const concatLayer = graph.AddLayer<ConcatLayer>(concatDescriptor, "concat");
graph.AddLayer<OutputLayer>(0, "output");
@@ -387,7 +387,7 @@ BOOST_AUTO_TEST_CASE(EraseReplacedLayers)
{splitterLayer,
convLayer1,
convLayer2,
- mergerLayer});
+ concatLayer});
// Construct dummy pre-compiled layer
PreCompiledDescriptor preCompiledDescriptor(0, 0);
@@ -538,8 +538,8 @@ BOOST_AUTO_TEST_CASE(IslandInTheMiddle)
//
Graph graph;
- OriginsDescriptor mergerDescriptor(2);
- auto x2 = graph.AddLayer<MergerLayer>(mergerDescriptor, "x2");
+ OriginsDescriptor concatDescriptor(2);
+ auto x2 = graph.AddLayer<ConcatLayer>(concatDescriptor, "x2");
auto m3 = graph.InsertNewLayer<ActivationLayer>(x2->GetInputSlot(0),
ActivationDescriptor{},
"m3");
@@ -856,14 +856,14 @@ BOOST_AUTO_TEST_CASE(MultiInputMultiOutput)
Graph graph;
ActivationDescriptor activationDefaults;
- OriginsDescriptor mergerDescriptor(2);
+ OriginsDescriptor concatDescriptor(2);
auto x1 = graph.AddLayer<InputLayer>(0, "x1");
auto x2 = graph.AddLayer<InputLayer>(1, "x2");
auto m1 = graph.AddLayer<ActivationLayer>(activationDefaults, "m1");
auto m2 = graph.AddLayer<ActivationLayer>(activationDefaults, "m2");
- auto m3 = graph.AddLayer<MergerLayer>(mergerDescriptor, "m3");
+ auto m3 = graph.AddLayer<ConcatLayer>(concatDescriptor, "m3");
auto m4 = graph.AddLayer<ActivationLayer>(activationDefaults, "m4");
auto m5 = graph.AddLayer<ActivationLayer>(activationDefaults, "m5");
@@ -887,11 +887,11 @@ BOOST_AUTO_TEST_CASE(MultiInputMultiOutput)
SubgraphViewSelector::Subgraphs subgraphs =
SubgraphViewSelector::SelectSubgraphs(
graph,
- // select Activation and Merger Layers M1, M2, M3, M4, M5
+ // select Activation and Concat Layers M1, M2, M3, M4, M5
[](const Layer & l)
{
bool toSelect = (l.GetType() == LayerType::Activation
- || l.GetType() == LayerType::Merger);
+ || l.GetType() == LayerType::Concat);
return toSelect;
});
@@ -994,18 +994,18 @@ BOOST_AUTO_TEST_CASE(MultipleSubgraphs)
Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");
- OriginsDescriptor mergerDescriptor(2);
- Layer* const mergerLayer = graph.AddLayer<MergerLayer>(mergerDescriptor, "merger");
- mergerLayer->SetBackendId(Compute::CpuAcc);
+ OriginsDescriptor concatDescriptor(2);
+ Layer* const pConcatLayer = graph.AddLayer<ConcatLayer>(concatDescriptor, "concat");
+ pConcatLayer->SetBackendId(Compute::CpuAcc);
Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, "output");
inputLayer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0));
splitterLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
splitterLayer->GetOutputSlot(1).Connect(convLayer2->GetInputSlot(0));
- convLayer1->GetOutputSlot(0).Connect(mergerLayer->GetInputSlot(0));
- convLayer2->GetOutputSlot(0).Connect(mergerLayer->GetInputSlot(1));
- mergerLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
+ convLayer1->GetOutputSlot(0).Connect(pConcatLayer->GetInputSlot(0));
+ convLayer2->GetOutputSlot(0).Connect(pConcatLayer->GetInputSlot(1));
+ pConcatLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
// CpuAcc sub graph selector
SubgraphViewSelector::Subgraphs subgraphs =
@@ -1096,7 +1096,7 @@ BOOST_AUTO_TEST_CASE(SubgraphCycles)
//
Graph graph;
- OriginsDescriptor mergerDescriptor(2);
+ OriginsDescriptor originsDescriptor(2);
auto x0 = graph.AddLayer<InputLayer>(0, "x0");
auto m0 = graph.AddLayer<ActivationLayer>(ActivationDescriptor{}, "m0");
auto x1 = graph.AddLayer<ActivationLayer>(ActivationDescriptor{}, "x1");
diff --git a/src/armnn/test/TestNameAndDescriptorLayerVisitor.cpp b/src/armnn/test/TestNameAndDescriptorLayerVisitor.cpp
index f94906d10d..478f0293a4 100644
--- a/src/armnn/test/TestNameAndDescriptorLayerVisitor.cpp
+++ b/src/armnn/test/TestNameAndDescriptorLayerVisitor.cpp
@@ -20,7 +20,7 @@ void Set2dDataValues(SplitterDescriptor descriptor, u_int32_t value)
}
}
-void Set2dDataValues(MergerDescriptor descriptor, u_int32_t value)
+void Set2dDataValues(OriginsDescriptor& descriptor, u_int32_t value)
{
for (unsigned int i = 0; i < descriptor.GetNumViews(); ++i)
{
@@ -230,32 +230,28 @@ BOOST_AUTO_TEST_CASE(CheckSplitterLayerVisitorNameNullAndDescriptor)
layer->Accept(visitor);
}
-BOOST_AUTO_TEST_CASE(CheckMergerLayerVisitorNameAndDescriptor)
+BOOST_AUTO_TEST_CASE(CheckConcatLayerVisitorNameAndDescriptor)
{
- const char* layerName = "MergerLayer";
- MergerDescriptor descriptor(2, 2);
+ const char* layerName = "ConcatLayer";
+ OriginsDescriptor descriptor(2, 2);
Set2dDataValues(descriptor, 1);
descriptor.SetConcatAxis(1);
- TestMergerLayerVisitor visitor(descriptor, layerName);
+ TestConcatLayerVisitor visitor(descriptor, layerName);
Network net;
- ARMNN_NO_DEPRECATE_WARN_BEGIN
- IConnectableLayer *const layer = net.AddMergerLayer(descriptor, layerName);
- ARMNN_NO_DEPRECATE_WARN_END
+ IConnectableLayer *const layer = net.AddConcatLayer(descriptor, layerName);
layer->Accept(visitor);
}
-BOOST_AUTO_TEST_CASE(CheckMergerLayerVisitorNameNullAndDescriptor)
+BOOST_AUTO_TEST_CASE(CheckConcatLayerVisitorNameNullAndDescriptor)
{
- MergerDescriptor descriptor(2, 2);
+ OriginsDescriptor descriptor(2, 2);
Set2dDataValues(descriptor, 1);
descriptor.SetConcatAxis(1);
- TestMergerLayerVisitor visitor(descriptor);
+ TestConcatLayerVisitor visitor(descriptor);
Network net;
- ARMNN_NO_DEPRECATE_WARN_BEGIN
- IConnectableLayer *const layer = net.AddMergerLayer(descriptor);
- ARMNN_NO_DEPRECATE_WARN_END
+ IConnectableLayer *const layer = net.AddConcatLayer(descriptor);
layer->Accept(visitor);
}
diff --git a/src/armnn/test/TestNameAndDescriptorLayerVisitor.hpp b/src/armnn/test/TestNameAndDescriptorLayerVisitor.hpp
index bf23332fb8..0db956d36d 100644
--- a/src/armnn/test/TestNameAndDescriptorLayerVisitor.hpp
+++ b/src/armnn/test/TestNameAndDescriptorLayerVisitor.hpp
@@ -317,70 +317,70 @@ public:
};
};
-class TestMergerLayerVisitor : public TestLayerVisitor
+class TestConcatLayerVisitor : public TestLayerVisitor
{
private:
OriginsDescriptor m_VisitorDescriptor;
public:
- explicit TestMergerLayerVisitor(const OriginsDescriptor& mergerDescriptor, const char* name = nullptr)
+ explicit TestConcatLayerVisitor(const OriginsDescriptor& concatDescriptor, const char* name = nullptr)
: TestLayerVisitor(name)
- , m_VisitorDescriptor(mergerDescriptor.GetNumViews(), mergerDescriptor.GetNumDimensions())
+ , m_VisitorDescriptor(concatDescriptor.GetNumViews(), concatDescriptor.GetNumDimensions())
{
- m_VisitorDescriptor.SetConcatAxis(mergerDescriptor.GetConcatAxis());
+ m_VisitorDescriptor.SetConcatAxis(concatDescriptor.GetConcatAxis());
- if (mergerDescriptor.GetNumViews() != m_VisitorDescriptor.GetNumViews())
+ if (concatDescriptor.GetNumViews() != m_VisitorDescriptor.GetNumViews())
{
BOOST_ERROR("Unequal number of views in splitter descriptor.");
}
- else if (mergerDescriptor.GetNumDimensions() != m_VisitorDescriptor.GetNumDimensions())
+ else if (concatDescriptor.GetNumDimensions() != m_VisitorDescriptor.GetNumDimensions())
{
BOOST_ERROR("Unequal number of dimensions in splitter descriptor.");
}
else
{
- for (unsigned int i = 0; i < mergerDescriptor.GetNumViews(); ++i)
+ for (unsigned int i = 0; i < concatDescriptor.GetNumViews(); ++i)
{
- for (unsigned int j = 0; j < mergerDescriptor.GetNumDimensions(); ++j)
+ for (unsigned int j = 0; j < concatDescriptor.GetNumDimensions(); ++j)
{
- m_VisitorDescriptor.SetViewOriginCoord(i, j, mergerDescriptor.GetViewOrigin(i)[j]);
+ m_VisitorDescriptor.SetViewOriginCoord(i, j, concatDescriptor.GetViewOrigin(i)[j]);
}
}
}
};
- void CheckDescriptor(const OriginsDescriptor& mergerDescriptor)
+ void CheckDescriptor(const OriginsDescriptor& concatDescriptor)
{
- BOOST_CHECK_EQUAL(mergerDescriptor.GetNumViews(), m_VisitorDescriptor.GetNumViews());
- BOOST_CHECK_EQUAL(mergerDescriptor.GetNumDimensions(), m_VisitorDescriptor.GetNumDimensions());
- BOOST_CHECK_EQUAL(mergerDescriptor.GetConcatAxis(), m_VisitorDescriptor.GetConcatAxis());
+ BOOST_CHECK_EQUAL(concatDescriptor.GetNumViews(), m_VisitorDescriptor.GetNumViews());
+ BOOST_CHECK_EQUAL(concatDescriptor.GetNumDimensions(), m_VisitorDescriptor.GetNumDimensions());
+ BOOST_CHECK_EQUAL(concatDescriptor.GetConcatAxis(), m_VisitorDescriptor.GetConcatAxis());
- if (mergerDescriptor.GetNumViews() != m_VisitorDescriptor.GetNumViews())
+ if (concatDescriptor.GetNumViews() != m_VisitorDescriptor.GetNumViews())
{
BOOST_ERROR("Unequal number of views in splitter descriptor.");
}
- else if (mergerDescriptor.GetNumDimensions() != m_VisitorDescriptor.GetNumDimensions())
+ else if (concatDescriptor.GetNumDimensions() != m_VisitorDescriptor.GetNumDimensions())
{
BOOST_ERROR("Unequal number of dimensions in splitter descriptor.");
}
else
{
- for (unsigned int i = 0; i < mergerDescriptor.GetNumViews(); ++i)
+ for (unsigned int i = 0; i < concatDescriptor.GetNumViews(); ++i)
{
- for (unsigned int j = 0; j < mergerDescriptor.GetNumDimensions(); ++j)
+ for (unsigned int j = 0; j < concatDescriptor.GetNumDimensions(); ++j)
{
- BOOST_CHECK_EQUAL(mergerDescriptor.GetViewOrigin(i)[j], m_VisitorDescriptor.GetViewOrigin(i)[j]);
+ BOOST_CHECK_EQUAL(concatDescriptor.GetViewOrigin(i)[j], m_VisitorDescriptor.GetViewOrigin(i)[j]);
}
}
}
}
- void VisitMergerLayer(const IConnectableLayer* layer,
- const OriginsDescriptor& mergerDescriptor,
+ void VisitConcatLayer(const IConnectableLayer* layer,
+ const OriginsDescriptor& concatDescriptor,
const char* name = nullptr) override
{
CheckLayerPointer(layer);
- CheckDescriptor(mergerDescriptor);
+ CheckDescriptor(concatDescriptor);
CheckLayerName(name);
};
};