Diffstat (limited to 'src/armnn/test/optimizations')
-rw-r--r--  src/armnn/test/optimizations/FuseActivationTests.cpp                        |  6
-rw-r--r--  src/armnn/test/optimizations/FuseBatchNormTests.cpp                         |  4
-rw-r--r--  src/armnn/test/optimizations/PermuteAndBatchToSpaceAsDepthToSpaceTests.cpp  | 73
3 files changed, 71 insertions, 12 deletions
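
All three files make the same mechanical change: instead of copying the Graph out of the concrete OptimizedNetwork via PolymorphicDowncast, the tests bind a reference through the GetGraphForTesting helper, and the PermuteAndBatchToSpace tests additionally build their fixture networks with NetworkImpl rather than casting an INetwork. A minimal sketch of the new access pattern follows; the include paths are assumptions and are not part of this diff.

    // Sketch only: include paths are assumed, not taken from this diff.
    #include <armnn/INetwork.hpp>   // armnn::IOptimizedNetworkPtr
    #include <Graph.hpp>            // armnn::Graph (internal ArmNN header)
    #include "TestUtils.hpp"        // assumed home of GetGraphForTesting

    // Old pattern (removed by this diff):
    //   Graph graph = PolymorphicDowncast<OptimizedNetwork*>(optNet.get())->GetGraph();
    // New pattern: bind a reference through the test helper -- no downcast, no graph copy.
    size_t CountOptimizedLayers(armnn::IOptimizedNetworkPtr& optNet)
    {
        armnn::Graph& graph = GetGraphForTesting(optNet.get());
        return graph.GetNumLayers();
    }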
diff --git a/src/armnn/test/optimizations/FuseActivationTests.cpp b/src/armnn/test/optimizations/FuseActivationTests.cpp
index c8adea2132..71a554b567 100644
--- a/src/armnn/test/optimizations/FuseActivationTests.cpp
+++ b/src/armnn/test/optimizations/FuseActivationTests.cpp
@@ -345,7 +345,7 @@ void FuseActivationIntoPreviousLayerTest(ActivationDescriptor activationDescript
// Optimise ArmNN network
IOptimizedNetworkPtr optNetFused = Optimize(*networkFused, {backendId}, run->GetDeviceSpec());
- Graph graphFused = PolymorphicDowncast<OptimizedNetwork*>(optNetFused.get())->GetGraph();
+ Graph& graphFused = GetGraphForTesting(optNetFused.get());
auto checkFusedConv2d = [](const Layer* const layer)->bool {
return IsLayerOfType<LayerType>(layer) &&
@@ -386,7 +386,7 @@ void FuseActivationIntoPreviousLayerTest(ActivationDescriptor activationDescript
// Optimise ArmNN network
IOptimizedNetworkPtr optNetNotFused = Optimize(*networkNotFused, {backendId}, runNotFused->GetDeviceSpec());
- Graph graphNotFused = PolymorphicDowncast<OptimizedNetwork*>(optNetNotFused.get())->GetGraph();
+ Graph& graphNotFused = GetGraphForTesting(optNetNotFused.get());
BOOST_CHECK(5 == graphNotFused.GetNumLayers());
BOOST_TEST(CheckSequence(graphNotFused.cbegin(),
@@ -443,8 +443,6 @@ bool FuseActivationSimpleTest(ActivationDescriptor activationDescriptor, Compute
// Optimise ArmNN network
IOptimizedNetworkPtr optNetFused = Optimize(*networkFused, {backendId}, run->GetDeviceSpec());
- Graph graphFused = PolymorphicDowncast<OptimizedNetwork*>(optNetFused.get())->GetGraph();
-
// Load network into runtime
NetworkId networkIdentifier;
BOOST_TEST(run->LoadNetwork(networkIdentifier, std::move(optNetFused)) == Status::Success);
diff --git a/src/armnn/test/optimizations/FuseBatchNormTests.cpp b/src/armnn/test/optimizations/FuseBatchNormTests.cpp
index bf47c577a4..be66c5e4af 100644
--- a/src/armnn/test/optimizations/FuseBatchNormTests.cpp
+++ b/src/armnn/test/optimizations/FuseBatchNormTests.cpp
@@ -186,7 +186,7 @@ void FuseBatchNormIntoConvTest(bool depthwise, float tolerance, armnn::Compute b
// Optimise ArmNN network
IOptimizedNetworkPtr optNetFused = Optimize(*networkFused, {backendId}, run->GetDeviceSpec());
- Graph graphFused = PolymorphicDowncast<OptimizedNetwork*>(optNetFused.get())->GetGraph();
+ Graph& graphFused = GetGraphForTesting(optNetFused.get());
auto checkFusedConv2d = [ ](const armnn::Layer* const layer) -> bool
{
@@ -233,7 +233,7 @@ void FuseBatchNormIntoConvTest(bool depthwise, float tolerance, armnn::Compute b
// Optimise ArmNN network
IOptimizedNetworkPtr optNetNotFused = Optimize(*networkNotFused, {backendId}, runNotFused->GetDeviceSpec());
- Graph graphNotFused = PolymorphicDowncast<OptimizedNetwork*>(optNetNotFused.get())->GetGraph();
+ Graph& graphNotFused = GetGraphForTesting(optNetNotFused.get());
BOOST_CHECK(5 == graphNotFused.GetNumLayers());
BOOST_TEST(CheckSequence(graphNotFused.cbegin(),
diff --git a/src/armnn/test/optimizations/PermuteAndBatchToSpaceAsDepthToSpaceTests.cpp b/src/armnn/test/optimizations/PermuteAndBatchToSpaceAsDepthToSpaceTests.cpp
index 6bfd7e301f..b47e3c7296 100644
--- a/src/armnn/test/optimizations/PermuteAndBatchToSpaceAsDepthToSpaceTests.cpp
+++ b/src/armnn/test/optimizations/PermuteAndBatchToSpaceAsDepthToSpaceTests.cpp
@@ -50,6 +50,36 @@ INetworkPtr CreateTestNetwork()
}
/// Shared function for the below tests, so that we test the same network in both cases.
+std::unique_ptr<NetworkImpl> CreateTestNetworkImpl()
+{
+ std::unique_ptr<NetworkImpl> network(new NetworkImpl());
+
+ auto input = network->AddInputLayer(0, "input");
+ const TensorInfo inputInfo({ 1, 2, 3, 4 }, DataType::Float32);
+ input->GetOutputSlot(0).SetTensorInfo(inputInfo);
+
+ // Insert Permute which swaps batches and channels dimensions
+ auto permute = network->AddPermuteLayer(PermuteDescriptor(PermutationVector{ 3, 1, 2, 0 }), "permute");
+ const TensorInfo permuteInfo({ 4, 2, 3, 1 }, DataType::Float32);
+ permute->GetOutputSlot(0).SetTensorInfo(permuteInfo);
+ input->GetOutputSlot(0).Connect(permute->GetInputSlot(0));
+
+ // Insert BatchToSpace
+ BatchToSpaceNdDescriptor batchToSpaceDesc;
+ batchToSpaceDesc.m_BlockShape = { 2, 2 };
+ batchToSpaceDesc.m_DataLayout = DataLayout::NHWC;
+ auto batchToSpace = network->AddBatchToSpaceNdLayer(batchToSpaceDesc, "batchToSpace");
+ const TensorInfo batchToSpaceInfo({ 1, 4, 6, 1 }, DataType::Float32);
+ batchToSpace->GetOutputSlot(0).SetTensorInfo(batchToSpaceInfo);
+ permute->GetOutputSlot(0).Connect(batchToSpace->GetInputSlot(0));
+
+ auto output = network->AddOutputLayer(0, "output");
+ batchToSpace->GetOutputSlot(0).Connect(output->GetInputSlot(0));
+
+ return network;
+}
+
+/// Shared function for the below tests, so that we test the same network in both cases.
INetworkPtr CreateTransposeTestNetwork()
{
// Create a network
@@ -80,14 +110,45 @@ INetworkPtr CreateTransposeTestNetwork()
return network;
}
+/// Shared function for the below tests, so that we test the same network in both cases.
+std::unique_ptr<NetworkImpl> CreateTransposeTestNetworkImpl()
+{
+ // Create a network
+ std::unique_ptr<NetworkImpl> network(new NetworkImpl());
+
+ auto input = network->AddInputLayer(0, "input");
+ const TensorInfo inputInfo({ 1, 2, 3, 4 }, DataType::Float32);
+ input->GetOutputSlot(0).SetTensorInfo(inputInfo);
+
+ // Insert Permute which swaps batches and channels dimensions
+ auto permute = network->AddTransposeLayer(TransposeDescriptor(PermutationVector{ 3, 1, 2, 0 }), "permute");
+ const TensorInfo permuteInfo({ 4, 2, 3, 1 }, DataType::Float32);
+ permute->GetOutputSlot(0).SetTensorInfo(permuteInfo);
+ input->GetOutputSlot(0).Connect(permute->GetInputSlot(0));
+
+ // Insert BatchToSpace
+ BatchToSpaceNdDescriptor batchToSpaceDesc;
+ batchToSpaceDesc.m_BlockShape = { 2, 2 };
+ batchToSpaceDesc.m_DataLayout = DataLayout::NHWC;
+ auto batchToSpace = network->AddBatchToSpaceNdLayer(batchToSpaceDesc, "batchToSpace");
+ const TensorInfo batchToSpaceInfo({ 1, 4, 6, 1 }, DataType::Float32);
+ batchToSpace->GetOutputSlot(0).SetTensorInfo(batchToSpaceInfo);
+ permute->GetOutputSlot(0).Connect(batchToSpace->GetInputSlot(0));
+
+ auto output = network->AddOutputLayer(0, "output");
+ batchToSpace->GetOutputSlot(0).Connect(output->GetInputSlot(0));
+
+ return network;
+}
+
} // namespace
/// Tests that the optimization performed by PermuteAndBatchToSpaceAsDepthToSpace is as expected.
/// Note this does not ensure the correctness of the optimization - that is done in the below test.
BOOST_AUTO_TEST_CASE(PermuteAndBatchToSpaceAsDepthToSpaceOptimizerTest)
{
- INetworkPtr network = CreateTestNetwork();
- Graph graph = static_cast<Network*>(network.get())->GetGraph();
+ std::unique_ptr<NetworkImpl> network = CreateTestNetworkImpl();
+ Graph graph = network.get()->GetGraph();
// Confirm initial graph is as we expect
BOOST_TEST(CheckSequence(graph.cbegin(), graph.cend(), &IsLayerOfType<InputLayer>, &IsLayerOfType<PermuteLayer>,
@@ -116,8 +177,8 @@ BOOST_AUTO_TEST_CASE(PermuteAndBatchToSpaceAsDepthToSpaceOptimizerTest)
/// Note this does not ensure the correctness of the optimization - that is done in the below test.
BOOST_AUTO_TEST_CASE(TransposeAndBatchToSpaceAsDepthToSpaceOptimizerTest)
{
- INetworkPtr network = CreateTransposeTestNetwork();
- Graph graph = static_cast<Network*>(network.get())->GetGraph();
+ std::unique_ptr<NetworkImpl> network = CreateTransposeTestNetworkImpl();
+ Graph graph = network.get()->GetGraph();
// Confirm initial graph is as we expect
BOOST_TEST(CheckSequence(graph.cbegin(), graph.cend(), &IsLayerOfType<InputLayer>, &IsLayerOfType<TransposeLayer>,
@@ -155,7 +216,7 @@ BOOST_AUTO_TEST_CASE(PermuteAndBatchToSpaceAsDepthToSpaceCorrectnessTest)
IOptimizedNetworkPtr optimizedNetwork = Optimize(*network, { Compute::CpuRef }, runtime->GetDeviceSpec());
// Confirm that the optimization has actually taken place
- const Graph& optGraph = static_cast<OptimizedNetwork*>(optimizedNetwork.get())->GetGraph();
+ const Graph& optGraph = GetGraphForTesting(optimizedNetwork.get());
BOOST_TEST(CheckSequence(optGraph.cbegin(), optGraph.cend(), &IsLayerOfType<InputLayer>,
&IsLayerOfType<DepthToSpaceLayer>, &IsLayerOfType<OutputLayer>));
@@ -202,7 +263,7 @@ BOOST_AUTO_TEST_CASE(TransposeAndBatchToSpaceAsDepthToSpaceCorrectnessTest)
IOptimizedNetworkPtr optimizedNetwork = Optimize(*network, { Compute::CpuRef }, runtime->GetDeviceSpec());
// Confirm that the optimization has actually taken place
- const Graph& optGraph = static_cast<OptimizedNetwork*>(optimizedNetwork.get())->GetGraph();
+ const Graph& optGraph = GetGraphForTesting(optimizedNetwork.get());
BOOST_TEST(CheckSequence(optGraph.cbegin(), optGraph.cend(), &IsLayerOfType<InputLayer>,
&IsLayerOfType<DepthToSpaceLayer>, &IsLayerOfType<OutputLayer>));